1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
25 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-vrp.h"
79 #include "tree-ssanames.h"
80 #include "selftest.h"
81 #include "stringpool.h"
82 #include "attribs.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"
85
86 /* Nonzero if we are folding constants inside an initializer; zero
87 otherwise. */
88 int folding_initializer = 0;
89
90 /* The following constants represent a bit based encoding of GCC's
91 comparison operators. This encoding simplifies transformations
92 on relational comparison operators, such as AND and OR. */
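/* Bit 0 encodes "less than", bit 1 "equal", bit 2 "greater than" and
   bit 3 "unordered"; e.g. COMPCODE_LE is COMPCODE_LT | COMPCODE_EQ and
   COMPCODE_NE covers LT, GT and UNORD.  */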
93 enum comparison_code {
94 COMPCODE_FALSE = 0,
95 COMPCODE_LT = 1,
96 COMPCODE_EQ = 2,
97 COMPCODE_LE = 3,
98 COMPCODE_GT = 4,
99 COMPCODE_LTGT = 5,
100 COMPCODE_GE = 6,
101 COMPCODE_ORD = 7,
102 COMPCODE_UNORD = 8,
103 COMPCODE_UNLT = 9,
104 COMPCODE_UNEQ = 10,
105 COMPCODE_UNLE = 11,
106 COMPCODE_UNGT = 12,
107 COMPCODE_NE = 13,
108 COMPCODE_UNGE = 14,
109 COMPCODE_TRUE = 15
110 };
111
112 static bool negate_expr_p (tree);
113 static tree negate_expr (tree);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static bool twoval_comparison_p (tree, tree *, tree *);
118 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 tree, tree, tree);
121 static bool simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
130 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
131 static tree fold_binary_op_with_conditional_arg (location_t,
132 enum tree_code, tree,
133 tree, tree,
134 tree, tree, int);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (const_tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_convert_const (enum tree_code, tree, tree);
139 static tree fold_view_convert_expr (tree, tree);
140 static tree fold_negate_expr (location_t, tree);
141
142
143 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
144 Otherwise, return LOC. */
145
146 static location_t
147 expr_location_or (tree t, location_t loc)
148 {
149 location_t tloc = EXPR_LOCATION (t);
150 return tloc == UNKNOWN_LOCATION ? loc : tloc;
151 }
152
153 /* Similar to protected_set_expr_location, but never modify x in place;
154 if the location can and needs to be set, unshare it. */
155
156 static inline tree
157 protected_set_expr_location_unshare (tree x, location_t loc)
158 {
159 if (CAN_HAVE_LOCATION_P (x)
160 && EXPR_LOCATION (x) != loc
161 && !(TREE_CODE (x) == SAVE_EXPR
162 || TREE_CODE (x) == TARGET_EXPR
163 || TREE_CODE (x) == BIND_EXPR))
164 {
165 x = copy_node (x);
166 SET_EXPR_LOCATION (x, loc);
167 }
168 return x;
169 }
170 \f
171 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
172 division and returns the quotient. Otherwise returns
173 NULL_TREE. */
174
175 tree
176 div_if_zero_remainder (const_tree arg1, const_tree arg2)
177 {
178 widest_int quo;
179
180 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
181 SIGNED, &quo))
182 return wide_int_to_tree (TREE_TYPE (arg1), quo);
183
184 return NULL_TREE;
185 }
186 \f
187 /* This is nonzero if we should defer warnings about undefined
188 overflow. This facility exists because these warnings are a
189 special case. The code to estimate loop iterations does not want
190 to issue any warnings, since it works with expressions which do not
191 occur in user code. Various bits of cleanup code call fold(), but
192 only use the result if it has certain characteristics (e.g., is a
193 constant); that code only wants to issue a warning if the result is
194 used. */
195
196 static int fold_deferring_overflow_warnings;
197
198 /* If a warning about undefined overflow is deferred, this is the
199 warning. Note that this may cause us to turn two warnings into
200 one, but that is fine since it is sufficient to only give one
201 warning per expression. */
202
203 static const char* fold_deferred_overflow_warning;
204
205 /* If a warning about undefined overflow is deferred, this is the
206 level at which the warning should be emitted. */
207
208 static enum warn_strict_overflow_code fold_deferred_overflow_code;
209
210 /* Start deferring overflow warnings. We could use a stack here to
211 permit nested calls, but at present it is not necessary. */
212
213 void
214 fold_defer_overflow_warnings (void)
215 {
216 ++fold_deferring_overflow_warnings;
217 }
218
219 /* Stop deferring overflow warnings. If there is a pending warning,
220 and ISSUE is true, then issue the warning if appropriate. STMT is
221 the statement with which the warning should be associated (used for
222 location information); STMT may be NULL. CODE is the level of the
223 warning--a warn_strict_overflow_code value. This function will use
224 the smaller of CODE and the deferred code when deciding whether to
225 issue the warning. CODE may be zero, meaning always use the
226 deferred code. */
227
228 void
229 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
230 {
231 const char *warnmsg;
232 location_t locus;
233
234 gcc_assert (fold_deferring_overflow_warnings > 0);
235 --fold_deferring_overflow_warnings;
236 if (fold_deferring_overflow_warnings > 0)
237 {
238 if (fold_deferred_overflow_warning != NULL
239 && code != 0
240 && code < (int) fold_deferred_overflow_code)
241 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
242 return;
243 }
244
245 warnmsg = fold_deferred_overflow_warning;
246 fold_deferred_overflow_warning = NULL;
247
248 if (!issue || warnmsg == NULL)
249 return;
250
251 if (gimple_no_warning_p (stmt))
252 return;
253
254 /* Use the smallest code level when deciding to issue the
255 warning. */
256 if (code == 0 || code > (int) fold_deferred_overflow_code)
257 code = fold_deferred_overflow_code;
258
259 if (!issue_strict_overflow_warning (code))
260 return;
261
262 if (stmt == NULL)
263 locus = input_location;
264 else
265 locus = gimple_location (stmt);
266 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
267 }
268
269 /* Stop deferring overflow warnings, ignoring any deferred
270 warnings. */
271
272 void
273 fold_undefer_and_ignore_overflow_warnings (void)
274 {
275 fold_undefer_overflow_warnings (false, NULL, 0);
276 }
277
278 /* Whether we are deferring overflow warnings. */
279
280 bool
281 fold_deferring_overflow_warnings_p (void)
282 {
283 return fold_deferring_overflow_warnings > 0;
284 }
285
286 /* This is called when we fold something based on the fact that signed
287 overflow is undefined. */
288
289 void
290 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
291 {
292 if (fold_deferring_overflow_warnings > 0)
293 {
294 if (fold_deferred_overflow_warning == NULL
295 || wc < fold_deferred_overflow_code)
296 {
297 fold_deferred_overflow_warning = gmsgid;
298 fold_deferred_overflow_code = wc;
299 }
300 }
301 else if (issue_strict_overflow_warning (wc))
302 warning (OPT_Wstrict_overflow, gmsgid);
303 }
304 \f
305 /* Return true if the built-in mathematical function specified by FN
306 is odd, i.e. -f(x) == f(-x). */
307
308 bool
309 negate_mathfn_p (combined_fn fn)
310 {
311 switch (fn)
312 {
313 CASE_CFN_ASIN:
314 CASE_CFN_ASINH:
315 CASE_CFN_ATAN:
316 CASE_CFN_ATANH:
317 CASE_CFN_CASIN:
318 CASE_CFN_CASINH:
319 CASE_CFN_CATAN:
320 CASE_CFN_CATANH:
321 CASE_CFN_CBRT:
322 CASE_CFN_CPROJ:
323 CASE_CFN_CSIN:
324 CASE_CFN_CSINH:
325 CASE_CFN_CTAN:
326 CASE_CFN_CTANH:
327 CASE_CFN_ERF:
328 CASE_CFN_LLROUND:
329 CASE_CFN_LROUND:
330 CASE_CFN_ROUND:
331 CASE_CFN_ROUNDEVEN:
332 CASE_CFN_ROUNDEVEN_FN:
333 CASE_CFN_SIN:
334 CASE_CFN_SINH:
335 CASE_CFN_TAN:
336 CASE_CFN_TANH:
337 CASE_CFN_TRUNC:
338 return true;
339
340 CASE_CFN_LLRINT:
341 CASE_CFN_LRINT:
342 CASE_CFN_NEARBYINT:
343 CASE_CFN_RINT:
344 return !flag_rounding_math;
345
346 default:
347 break;
348 }
349 return false;
350 }
351
352 /* Check whether we may negate an integer constant T without causing
353 overflow. */
354
355 bool
356 may_negate_without_overflow_p (const_tree t)
357 {
358 tree type;
359
360 gcc_assert (TREE_CODE (t) == INTEGER_CST);
361
362 type = TREE_TYPE (t);
363 if (TYPE_UNSIGNED (type))
364 return false;
365
366 return !wi::only_sign_bit_p (wi::to_wide (t));
367 }
368
369 /* Determine whether an expression T can be cheaply negated using
370 the function negate_expr without introducing undefined overflow. */
371
372 static bool
373 negate_expr_p (tree t)
374 {
375 tree type;
376
377 if (t == 0)
378 return false;
379
380 type = TREE_TYPE (t);
381
382 STRIP_SIGN_NOPS (t);
383 switch (TREE_CODE (t))
384 {
385 case INTEGER_CST:
386 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
387 return true;
388
389 /* Check that -CST will not overflow type. */
390 return may_negate_without_overflow_p (t);
391 case BIT_NOT_EXPR:
392 return (INTEGRAL_TYPE_P (type)
393 && TYPE_OVERFLOW_WRAPS (type));
394
395 case FIXED_CST:
396 return true;
397
398 case NEGATE_EXPR:
399 return !TYPE_OVERFLOW_SANITIZED (type);
400
401 case REAL_CST:
402 /* We want to canonicalize to positive real constants. Pretend
403 that only negative ones can be easily negated. */
404 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
405
406 case COMPLEX_CST:
407 return negate_expr_p (TREE_REALPART (t))
408 && negate_expr_p (TREE_IMAGPART (t));
409
410 case VECTOR_CST:
411 {
412 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
413 return true;
414
415 /* Steps don't prevent negation. */
416 unsigned int count = vector_cst_encoded_nelts (t);
417 for (unsigned int i = 0; i < count; ++i)
418 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
419 return false;
420
421 return true;
422 }
423
424 case COMPLEX_EXPR:
425 return negate_expr_p (TREE_OPERAND (t, 0))
426 && negate_expr_p (TREE_OPERAND (t, 1));
427
428 case CONJ_EXPR:
429 return negate_expr_p (TREE_OPERAND (t, 0));
430
431 case PLUS_EXPR:
432 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
433 || HONOR_SIGNED_ZEROS (element_mode (type))
434 || (ANY_INTEGRAL_TYPE_P (type)
435 && ! TYPE_OVERFLOW_WRAPS (type)))
436 return false;
437 /* -(A + B) -> (-B) - A. */
438 if (negate_expr_p (TREE_OPERAND (t, 1)))
439 return true;
440 /* -(A + B) -> (-A) - B. */
441 return negate_expr_p (TREE_OPERAND (t, 0));
442
443 case MINUS_EXPR:
444 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
445 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
446 && !HONOR_SIGNED_ZEROS (element_mode (type))
447 && (! ANY_INTEGRAL_TYPE_P (type)
448 || TYPE_OVERFLOW_WRAPS (type));
449
450 case MULT_EXPR:
451 if (TYPE_UNSIGNED (type))
452 break;
453 /* INT_MIN/n * n doesn't overflow, but negating one of its operands does
454 if n is a (negative) power of two. */
455 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
456 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
457 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
458 && (wi::popcount
459 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
460 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
461 && (wi::popcount
462 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
463 break;
464
465 /* Fall through. */
466
467 case RDIV_EXPR:
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
469 return negate_expr_p (TREE_OPERAND (t, 1))
470 || negate_expr_p (TREE_OPERAND (t, 0));
471 break;
472
473 case TRUNC_DIV_EXPR:
474 case ROUND_DIV_EXPR:
475 case EXACT_DIV_EXPR:
476 if (TYPE_UNSIGNED (type))
477 break;
478 /* In general we can't negate A in A / B, because if A is INT_MIN and
479 B is not 1 we change the sign of the result. */
480 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
481 && negate_expr_p (TREE_OPERAND (t, 0)))
482 return true;
483 /* In general we can't negate B in A / B, because if A is INT_MIN and
484 B is 1, we may turn this into INT_MIN / -1 which is undefined
485 and actually traps on some architectures. */
486 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
487 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
488 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
489 && ! integer_onep (TREE_OPERAND (t, 1))))
490 return negate_expr_p (TREE_OPERAND (t, 1));
491 break;
492
493 case NOP_EXPR:
494 /* Negate -((double)float) as (double)(-float). */
495 if (TREE_CODE (type) == REAL_TYPE)
496 {
497 tree tem = strip_float_extensions (t);
498 if (tem != t)
499 return negate_expr_p (tem);
500 }
501 break;
502
503 case CALL_EXPR:
504 /* Negate -f(x) as f(-x). */
505 if (negate_mathfn_p (get_call_combined_fn (t)))
506 return negate_expr_p (CALL_EXPR_ARG (t, 0));
507 break;
508
509 case RSHIFT_EXPR:
510 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
511 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
512 {
513 tree op1 = TREE_OPERAND (t, 1);
514 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
515 return true;
516 }
517 break;
518
519 default:
520 break;
521 }
522 return false;
523 }
524
525 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
526 simplification is possible.
527 If negate_expr_p would return true for T, NULL_TREE will never be
528 returned. */
529
530 static tree
531 fold_negate_expr_1 (location_t loc, tree t)
532 {
533 tree type = TREE_TYPE (t);
534 tree tem;
535
536 switch (TREE_CODE (t))
537 {
538 /* Convert - (~A) to A + 1. */
539 case BIT_NOT_EXPR:
540 if (INTEGRAL_TYPE_P (type))
541 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
542 build_one_cst (type));
543 break;
544
545 case INTEGER_CST:
546 tem = fold_negate_const (t, type);
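/* Use the folded constant only if negating did not newly set TREE_OVERFLOW,
   or if the type wraps without trapping, or if signed-integer-overflow
   sanitization is not enabled; otherwise give up so the overflowing
   negation stays visible for the sanitizer at run time.  */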
547 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
548 || (ANY_INTEGRAL_TYPE_P (type)
549 && !TYPE_OVERFLOW_TRAPS (type)
550 && TYPE_OVERFLOW_WRAPS (type))
551 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
552 return tem;
553 break;
554
555 case POLY_INT_CST:
556 case REAL_CST:
557 case FIXED_CST:
558 tem = fold_negate_const (t, type);
559 return tem;
560
561 case COMPLEX_CST:
562 {
563 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
564 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
565 if (rpart && ipart)
566 return build_complex (type, rpart, ipart);
567 }
568 break;
569
570 case VECTOR_CST:
571 {
572 tree_vector_builder elts;
573 elts.new_unary_operation (type, t, true);
574 unsigned int count = elts.encoded_nelts ();
575 for (unsigned int i = 0; i < count; ++i)
576 {
577 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
578 if (elt == NULL_TREE)
579 return NULL_TREE;
580 elts.quick_push (elt);
581 }
582
583 return elts.build ();
584 }
585
586 case COMPLEX_EXPR:
587 if (negate_expr_p (t))
588 return fold_build2_loc (loc, COMPLEX_EXPR, type,
589 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
590 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
591 break;
592
593 case CONJ_EXPR:
594 if (negate_expr_p (t))
595 return fold_build1_loc (loc, CONJ_EXPR, type,
596 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
597 break;
598
599 case NEGATE_EXPR:
600 if (!TYPE_OVERFLOW_SANITIZED (type))
601 return TREE_OPERAND (t, 0);
602 break;
603
604 case PLUS_EXPR:
605 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
606 && !HONOR_SIGNED_ZEROS (element_mode (type)))
607 {
608 /* -(A + B) -> (-B) - A. */
609 if (negate_expr_p (TREE_OPERAND (t, 1)))
610 {
611 tem = negate_expr (TREE_OPERAND (t, 1));
612 return fold_build2_loc (loc, MINUS_EXPR, type,
613 tem, TREE_OPERAND (t, 0));
614 }
615
616 /* -(A + B) -> (-A) - B. */
617 if (negate_expr_p (TREE_OPERAND (t, 0)))
618 {
619 tem = negate_expr (TREE_OPERAND (t, 0));
620 return fold_build2_loc (loc, MINUS_EXPR, type,
621 tem, TREE_OPERAND (t, 1));
622 }
623 }
624 break;
625
626 case MINUS_EXPR:
627 /* - (A - B) -> B - A */
628 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
629 && !HONOR_SIGNED_ZEROS (element_mode (type)))
630 return fold_build2_loc (loc, MINUS_EXPR, type,
631 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
632 break;
633
634 case MULT_EXPR:
635 if (TYPE_UNSIGNED (type))
636 break;
637
638 /* Fall through. */
639
640 case RDIV_EXPR:
641 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
642 {
643 tem = TREE_OPERAND (t, 1);
644 if (negate_expr_p (tem))
645 return fold_build2_loc (loc, TREE_CODE (t), type,
646 TREE_OPERAND (t, 0), negate_expr (tem));
647 tem = TREE_OPERAND (t, 0);
648 if (negate_expr_p (tem))
649 return fold_build2_loc (loc, TREE_CODE (t), type,
650 negate_expr (tem), TREE_OPERAND (t, 1));
651 }
652 break;
653
654 case TRUNC_DIV_EXPR:
655 case ROUND_DIV_EXPR:
656 case EXACT_DIV_EXPR:
657 if (TYPE_UNSIGNED (type))
658 break;
659 /* In general we can't negate A in A / B, because if A is INT_MIN and
660 B is not 1 we change the sign of the result. */
661 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
662 && negate_expr_p (TREE_OPERAND (t, 0)))
663 return fold_build2_loc (loc, TREE_CODE (t), type,
664 negate_expr (TREE_OPERAND (t, 0)),
665 TREE_OPERAND (t, 1));
666 /* In general we can't negate B in A / B, because if A is INT_MIN and
667 B is 1, we may turn this into INT_MIN / -1 which is undefined
668 and actually traps on some architectures. */
669 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
670 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
671 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
672 && ! integer_onep (TREE_OPERAND (t, 1))))
673 && negate_expr_p (TREE_OPERAND (t, 1)))
674 return fold_build2_loc (loc, TREE_CODE (t), type,
675 TREE_OPERAND (t, 0),
676 negate_expr (TREE_OPERAND (t, 1)));
677 break;
678
679 case NOP_EXPR:
680 /* Convert -((double)float) into (double)(-float). */
681 if (TREE_CODE (type) == REAL_TYPE)
682 {
683 tem = strip_float_extensions (t);
684 if (tem != t && negate_expr_p (tem))
685 return fold_convert_loc (loc, type, negate_expr (tem));
686 }
687 break;
688
689 case CALL_EXPR:
690 /* Negate -f(x) as f(-x). */
691 if (negate_mathfn_p (get_call_combined_fn (t))
692 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
693 {
694 tree fndecl, arg;
695
696 fndecl = get_callee_fndecl (t);
697 arg = negate_expr (CALL_EXPR_ARG (t, 0));
698 return build_call_expr_loc (loc, fndecl, 1, arg);
699 }
700 break;
701
702 case RSHIFT_EXPR:
703 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
704 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
705 {
706 tree op1 = TREE_OPERAND (t, 1);
707 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
708 {
709 tree ntype = TYPE_UNSIGNED (type)
710 ? signed_type_for (type)
711 : unsigned_type_for (type);
712 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
713 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
714 return fold_convert_loc (loc, type, temp);
715 }
716 }
717 break;
718
719 default:
720 break;
721 }
722
723 return NULL_TREE;
724 }
725
726 /* A wrapper for fold_negate_expr_1. */
727
728 static tree
729 fold_negate_expr (location_t loc, tree t)
730 {
731 tree type = TREE_TYPE (t);
732 STRIP_SIGN_NOPS (t);
733 tree tem = fold_negate_expr_1 (loc, t);
734 if (tem == NULL_TREE)
735 return NULL_TREE;
736 return fold_convert_loc (loc, type, tem);
737 }
738
739 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
740 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
741 return NULL_TREE. */
742
743 static tree
744 negate_expr (tree t)
745 {
746 tree type, tem;
747 location_t loc;
748
749 if (t == NULL_TREE)
750 return NULL_TREE;
751
752 loc = EXPR_LOCATION (t);
753 type = TREE_TYPE (t);
754 STRIP_SIGN_NOPS (t);
755
756 tem = fold_negate_expr (loc, t);
757 if (!tem)
758 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
759 return fold_convert_loc (loc, type, tem);
760 }
761 \f
762 /* Split a tree IN into constant, literal and variable parts that could be
763 combined with CODE to make IN. "constant" means an expression with
764 TREE_CONSTANT but that isn't an actual constant. CODE must be a
765 commutative arithmetic operation. Store the constant part into *CONP,
766 the literal in *LITP and return the variable part. If a part isn't
767 present, set it to null. If the tree does not decompose in this way,
768 return the entire tree as the variable part and the other parts as null.
769
770 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
771 case, we negate an operand that was subtracted, except if it is a
772 literal, for which we use *MINUS_LITP instead.
773
774 If NEGATE_P is true, we are negating all of IN, again except a literal
775 for which we use *MINUS_LITP instead. If a variable part is of pointer
776 type, it is negated after converting to TYPE. This prevents us from
777 generating an illegal MINUS pointer expression. LOC is the location of
778 the converted variable part.
779
780 If IN is itself a literal or constant, return it as appropriate.
781
782 Note that we do not guarantee that any of the three values will be the
783 same type as IN, but they will have the same signedness and mode. */
784
785 static tree
786 split_tree (tree in, tree type, enum tree_code code,
787 tree *minus_varp, tree *conp, tree *minus_conp,
788 tree *litp, tree *minus_litp, int negate_p)
789 {
790 tree var = 0;
791 *minus_varp = 0;
792 *conp = 0;
793 *minus_conp = 0;
794 *litp = 0;
795 *minus_litp = 0;
796
797 /* Strip any conversions that don't change the machine mode or signedness. */
798 STRIP_SIGN_NOPS (in);
799
800 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
801 || TREE_CODE (in) == FIXED_CST)
802 *litp = in;
803 else if (TREE_CODE (in) == code
804 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
805 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
806 /* We can associate addition and subtraction together (even
807 though the C standard doesn't say so) for integers because
808 the value is not affected. For reals, the value might be
809 affected, so we can't. */
810 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
811 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
812 || (code == MINUS_EXPR
813 && (TREE_CODE (in) == PLUS_EXPR
814 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
815 {
816 tree op0 = TREE_OPERAND (in, 0);
817 tree op1 = TREE_OPERAND (in, 1);
818 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
819 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
820
821 /* First see if either of the operands is a literal, then a constant. */
822 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
823 || TREE_CODE (op0) == FIXED_CST)
824 *litp = op0, op0 = 0;
825 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
826 || TREE_CODE (op1) == FIXED_CST)
827 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
828
829 if (op0 != 0 && TREE_CONSTANT (op0))
830 *conp = op0, op0 = 0;
831 else if (op1 != 0 && TREE_CONSTANT (op1))
832 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
833
834 /* If we haven't dealt with either operand, this is not a case we can
835 decompose. Otherwise, VAR is either of the ones remaining, if any. */
836 if (op0 != 0 && op1 != 0)
837 var = in;
838 else if (op0 != 0)
839 var = op0;
840 else
841 var = op1, neg_var_p = neg1_p;
842
843 /* Now do any needed negations. */
844 if (neg_litp_p)
845 *minus_litp = *litp, *litp = 0;
846 if (neg_conp_p && *conp)
847 *minus_conp = *conp, *conp = 0;
848 if (neg_var_p && var)
849 *minus_varp = var, var = 0;
850 }
851 else if (TREE_CONSTANT (in))
852 *conp = in;
853 else if (TREE_CODE (in) == BIT_NOT_EXPR
854 && code == PLUS_EXPR)
855 {
856 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
857 when IN is constant. */
858 *litp = build_minus_one_cst (type);
859 *minus_varp = TREE_OPERAND (in, 0);
860 }
861 else
862 var = in;
863
864 if (negate_p)
865 {
866 if (*litp)
867 *minus_litp = *litp, *litp = 0;
868 else if (*minus_litp)
869 *litp = *minus_litp, *minus_litp = 0;
870 if (*conp)
871 *minus_conp = *conp, *conp = 0;
872 else if (*minus_conp)
873 *conp = *minus_conp, *minus_conp = 0;
874 if (var)
875 *minus_varp = var, var = 0;
876 else if (*minus_varp)
877 var = *minus_varp, *minus_varp = 0;
878 }
879
880 if (*litp
881 && TREE_OVERFLOW_P (*litp))
882 *litp = drop_tree_overflow (*litp);
883 if (*minus_litp
884 && TREE_OVERFLOW_P (*minus_litp))
885 *minus_litp = drop_tree_overflow (*minus_litp);
886
887 return var;
888 }
889
890 /* Re-associate trees split by the above function. T1 and T2 are
891 either expressions to associate or null. Return the new
892 expression, if any. LOC is the location of the new expression. If
893 we build an operation, do it in TYPE and with CODE. */
894
895 static tree
896 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
897 {
898 if (t1 == 0)
899 {
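/* For MINUS_EXPR a missing T1 cannot simply be replaced by T2, since
   that would drop the negation of T2; hence the assert below.  */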
900 gcc_assert (t2 == 0 || code != MINUS_EXPR);
901 return t2;
902 }
903 else if (t2 == 0)
904 return t1;
905
906 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
907 try to fold this since we will have infinite recursion. But do
908 deal with any NEGATE_EXPRs. */
909 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
910 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
911 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
912 {
913 if (code == PLUS_EXPR)
914 {
915 if (TREE_CODE (t1) == NEGATE_EXPR)
916 return build2_loc (loc, MINUS_EXPR, type,
917 fold_convert_loc (loc, type, t2),
918 fold_convert_loc (loc, type,
919 TREE_OPERAND (t1, 0)));
920 else if (TREE_CODE (t2) == NEGATE_EXPR)
921 return build2_loc (loc, MINUS_EXPR, type,
922 fold_convert_loc (loc, type, t1),
923 fold_convert_loc (loc, type,
924 TREE_OPERAND (t2, 0)));
925 else if (integer_zerop (t2))
926 return fold_convert_loc (loc, type, t1);
927 }
928 else if (code == MINUS_EXPR)
929 {
930 if (integer_zerop (t2))
931 return fold_convert_loc (loc, type, t1);
932 }
933
934 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
935 fold_convert_loc (loc, type, t2));
936 }
937
938 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
939 fold_convert_loc (loc, type, t2));
940 }
941 \f
942 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
943 for use in int_const_binop, size_binop and size_diffop. */
944
945 static bool
946 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
947 {
948 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
949 return false;
950 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
951 return false;
952
953 switch (code)
954 {
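/* Shift and rotate counts need not have the same type as the value
   being shifted, so these codes accept any pair of integer or
   pointer types.  */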
955 case LSHIFT_EXPR:
956 case RSHIFT_EXPR:
957 case LROTATE_EXPR:
958 case RROTATE_EXPR:
959 return true;
960
961 default:
962 break;
963 }
964
965 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
966 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
967 && TYPE_MODE (type1) == TYPE_MODE (type2);
968 }
969
970 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
971 a new constant in RES. Return FALSE if we don't know how to
972 evaluate CODE at compile-time. */
973
974 bool
975 wide_int_binop (wide_int &res,
976 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
977 signop sign, wi::overflow_type *overflow)
978 {
979 wide_int tmp;
980 *overflow = wi::OVF_NONE;
981 switch (code)
982 {
983 case BIT_IOR_EXPR:
984 res = wi::bit_or (arg1, arg2);
985 break;
986
987 case BIT_XOR_EXPR:
988 res = wi::bit_xor (arg1, arg2);
989 break;
990
991 case BIT_AND_EXPR:
992 res = wi::bit_and (arg1, arg2);
993 break;
994
995 case RSHIFT_EXPR:
996 case LSHIFT_EXPR:
997 if (wi::neg_p (arg2))
998 {
999 tmp = -arg2;
1000 if (code == RSHIFT_EXPR)
1001 code = LSHIFT_EXPR;
1002 else
1003 code = RSHIFT_EXPR;
1004 }
1005 else
1006 tmp = arg2;
1007
1008 if (code == RSHIFT_EXPR)
1009 /* It's unclear from the C standard whether shifts can overflow.
1010 The following code ignores overflow; perhaps a C standard
1011 interpretation ruling is needed. */
1012 res = wi::rshift (arg1, tmp, sign);
1013 else
1014 res = wi::lshift (arg1, tmp);
1015 break;
1016
1017 case RROTATE_EXPR:
1018 case LROTATE_EXPR:
1019 if (wi::neg_p (arg2))
1020 {
1021 tmp = -arg2;
1022 if (code == RROTATE_EXPR)
1023 code = LROTATE_EXPR;
1024 else
1025 code = RROTATE_EXPR;
1026 }
1027 else
1028 tmp = arg2;
1029
1030 if (code == RROTATE_EXPR)
1031 res = wi::rrotate (arg1, tmp);
1032 else
1033 res = wi::lrotate (arg1, tmp);
1034 break;
1035
1036 case PLUS_EXPR:
1037 res = wi::add (arg1, arg2, sign, overflow);
1038 break;
1039
1040 case MINUS_EXPR:
1041 res = wi::sub (arg1, arg2, sign, overflow);
1042 break;
1043
1044 case MULT_EXPR:
1045 res = wi::mul (arg1, arg2, sign, overflow);
1046 break;
1047
1048 case MULT_HIGHPART_EXPR:
1049 res = wi::mul_high (arg1, arg2, sign);
1050 break;
1051
1052 case TRUNC_DIV_EXPR:
1053 case EXACT_DIV_EXPR:
1054 if (arg2 == 0)
1055 return false;
1056 res = wi::div_trunc (arg1, arg2, sign, overflow);
1057 break;
1058
1059 case FLOOR_DIV_EXPR:
1060 if (arg2 == 0)
1061 return false;
1062 res = wi::div_floor (arg1, arg2, sign, overflow);
1063 break;
1064
1065 case CEIL_DIV_EXPR:
1066 if (arg2 == 0)
1067 return false;
1068 res = wi::div_ceil (arg1, arg2, sign, overflow);
1069 break;
1070
1071 case ROUND_DIV_EXPR:
1072 if (arg2 == 0)
1073 return false;
1074 res = wi::div_round (arg1, arg2, sign, overflow);
1075 break;
1076
1077 case TRUNC_MOD_EXPR:
1078 if (arg2 == 0)
1079 return false;
1080 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1081 break;
1082
1083 case FLOOR_MOD_EXPR:
1084 if (arg2 == 0)
1085 return false;
1086 res = wi::mod_floor (arg1, arg2, sign, overflow);
1087 break;
1088
1089 case CEIL_MOD_EXPR:
1090 if (arg2 == 0)
1091 return false;
1092 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1093 break;
1094
1095 case ROUND_MOD_EXPR:
1096 if (arg2 == 0)
1097 return false;
1098 res = wi::mod_round (arg1, arg2, sign, overflow);
1099 break;
1100
1101 case MIN_EXPR:
1102 res = wi::min (arg1, arg2, sign);
1103 break;
1104
1105 case MAX_EXPR:
1106 res = wi::max (arg1, arg2, sign);
1107 break;
1108
1109 default:
1110 return false;
1111 }
1112 return true;
1113 }
1114
1115 /* Combine two poly_int constants ARG1 and ARG2 under operation CODE to
1116 produce a new constant in RES. Return FALSE if we don't know how
1117 to evaluate CODE at compile-time. */
1118
1119 static bool
1120 poly_int_binop (poly_wide_int &res, enum tree_code code,
1121 const_tree arg1, const_tree arg2,
1122 signop sign, wi::overflow_type *overflow)
1123 {
1124 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1125 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1126 switch (code)
1127 {
1128 case PLUS_EXPR:
1129 res = wi::add (wi::to_poly_wide (arg1),
1130 wi::to_poly_wide (arg2), sign, overflow);
1131 break;
1132
1133 case MINUS_EXPR:
1134 res = wi::sub (wi::to_poly_wide (arg1),
1135 wi::to_poly_wide (arg2), sign, overflow);
1136 break;
1137
1138 case MULT_EXPR:
1139 if (TREE_CODE (arg2) == INTEGER_CST)
1140 res = wi::mul (wi::to_poly_wide (arg1),
1141 wi::to_wide (arg2), sign, overflow);
1142 else if (TREE_CODE (arg1) == INTEGER_CST)
1143 res = wi::mul (wi::to_poly_wide (arg2),
1144 wi::to_wide (arg1), sign, overflow);
1145 else
1146 return false;
1147 break;
1148
1149 case LSHIFT_EXPR:
1150 if (TREE_CODE (arg2) == INTEGER_CST)
1151 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1152 else
1153 return false;
1154 break;
1155
1156 case BIT_IOR_EXPR:
1157 if (TREE_CODE (arg2) != INTEGER_CST
1158 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1159 &res))
1160 return false;
1161 break;
1162
1163 default:
1164 return false;
1165 }
1166 return true;
1167 }
1168
1169 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1170 produce a new constant. Return NULL_TREE if we don't know how to
1171 evaluate CODE at compile-time. */
1172
1173 tree
1174 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1175 int overflowable)
1176 {
1177 poly_wide_int poly_res;
1178 tree type = TREE_TYPE (arg1);
1179 signop sign = TYPE_SIGN (type);
1180 wi::overflow_type overflow = wi::OVF_NONE;
1181
1182 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1183 {
1184 wide_int warg1 = wi::to_wide (arg1), res;
1185 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1186 if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1187 return NULL_TREE;
1188 poly_res = res;
1189 }
1190 else if (!poly_int_tree_p (arg1)
1191 || !poly_int_tree_p (arg2)
1192 || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1193 return NULL_TREE;
1194 return force_fit_type (type, poly_res, overflowable,
1195 (((sign == SIGNED || overflowable == -1)
1196 && overflow)
1197 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1198 }
1199
1200 /* Return true if binary operation OP distributes over addition in operand
1201 OPNO, with the other operand being held constant. OPNO counts from 1. */
1202
1203 static bool
1204 distributes_over_addition_p (tree_code op, int opno)
1205 {
1206 switch (op)
1207 {
1208 case PLUS_EXPR:
1209 case MINUS_EXPR:
1210 case MULT_EXPR:
1211 return true;
1212
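/* (a + b) << c equals (a << c) + (b << c) in a wrapping type, but
   addition does not distribute through the shift count.  */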
1213 case LSHIFT_EXPR:
1214 return opno == 1;
1215
1216 default:
1217 return false;
1218 }
1219 }
1220
1221 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1222 constant. We assume ARG1 and ARG2 have the same data type, or at least
1223 are the same kind of constant and the same machine mode. Return zero if
1224 combining the constants is not allowed in the current operating mode. */
1225
1226 static tree
1227 const_binop (enum tree_code code, tree arg1, tree arg2)
1228 {
1229 /* Sanity check for the recursive cases. */
1230 if (!arg1 || !arg2)
1231 return NULL_TREE;
1232
1233 STRIP_NOPS (arg1);
1234 STRIP_NOPS (arg2);
1235
1236 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1237 {
1238 if (code == POINTER_PLUS_EXPR)
1239 return int_const_binop (PLUS_EXPR,
1240 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1241
1242 return int_const_binop (code, arg1, arg2);
1243 }
1244
1245 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1246 {
1247 machine_mode mode;
1248 REAL_VALUE_TYPE d1;
1249 REAL_VALUE_TYPE d2;
1250 REAL_VALUE_TYPE value;
1251 REAL_VALUE_TYPE result;
1252 bool inexact;
1253 tree t, type;
1254
1255 /* The following codes are handled by real_arithmetic. */
1256 switch (code)
1257 {
1258 case PLUS_EXPR:
1259 case MINUS_EXPR:
1260 case MULT_EXPR:
1261 case RDIV_EXPR:
1262 case MIN_EXPR:
1263 case MAX_EXPR:
1264 break;
1265
1266 default:
1267 return NULL_TREE;
1268 }
1269
1270 d1 = TREE_REAL_CST (arg1);
1271 d2 = TREE_REAL_CST (arg2);
1272
1273 type = TREE_TYPE (arg1);
1274 mode = TYPE_MODE (type);
1275
1276 /* Don't perform operation if we honor signaling NaNs and
1277 either operand is a signaling NaN. */
1278 if (HONOR_SNANS (mode)
1279 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1280 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1281 return NULL_TREE;
1282
1283 /* Don't perform operation if it would raise a division
1284 by zero exception. */
1285 if (code == RDIV_EXPR
1286 && real_equal (&d2, &dconst0)
1287 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1288 return NULL_TREE;
1289
1290 /* If either operand is a NaN, just return it. Otherwise, set up
1291 for floating-point trap; we return an overflow. */
1292 if (REAL_VALUE_ISNAN (d1))
1293 {
1294 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1295 is off. */
1296 d1.signalling = 0;
1297 t = build_real (type, d1);
1298 return t;
1299 }
1300 else if (REAL_VALUE_ISNAN (d2))
1301 {
1302 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1303 is off. */
1304 d2.signalling = 0;
1305 t = build_real (type, d2);
1306 return t;
1307 }
1308
1309 inexact = real_arithmetic (&value, code, &d1, &d2);
1310 real_convert (&result, mode, &value);
1311
1312 /* Don't constant fold this floating point operation if
1313 the result has overflowed and flag_trapping_math. */
1314 if (flag_trapping_math
1315 && MODE_HAS_INFINITIES (mode)
1316 && REAL_VALUE_ISINF (result)
1317 && !REAL_VALUE_ISINF (d1)
1318 && !REAL_VALUE_ISINF (d2))
1319 return NULL_TREE;
1320
1321 /* Don't constant fold this floating point operation if the
1322 result may depend upon the run-time rounding mode and
1323 flag_rounding_math is set, or if GCC's software emulation
1324 is unable to accurately represent the result. */
1325 if ((flag_rounding_math
1326 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1327 && (inexact || !real_identical (&result, &value)))
1328 return NULL_TREE;
1329
1330 t = build_real (type, result);
1331
1332 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1333 return t;
1334 }
1335
1336 if (TREE_CODE (arg1) == FIXED_CST)
1337 {
1338 FIXED_VALUE_TYPE f1;
1339 FIXED_VALUE_TYPE f2;
1340 FIXED_VALUE_TYPE result;
1341 tree t, type;
1342 int sat_p;
1343 bool overflow_p;
1344
1345 /* The following codes are handled by fixed_arithmetic. */
1346 switch (code)
1347 {
1348 case PLUS_EXPR:
1349 case MINUS_EXPR:
1350 case MULT_EXPR:
1351 case TRUNC_DIV_EXPR:
1352 if (TREE_CODE (arg2) != FIXED_CST)
1353 return NULL_TREE;
1354 f2 = TREE_FIXED_CST (arg2);
1355 break;
1356
1357 case LSHIFT_EXPR:
1358 case RSHIFT_EXPR:
1359 {
1360 if (TREE_CODE (arg2) != INTEGER_CST)
1361 return NULL_TREE;
1362 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1363 f2.data.high = w2.elt (1);
1364 f2.data.low = w2.ulow ();
1365 f2.mode = SImode;
1366 }
1367 break;
1368
1369 default:
1370 return NULL_TREE;
1371 }
1372
1373 f1 = TREE_FIXED_CST (arg1);
1374 type = TREE_TYPE (arg1);
1375 sat_p = TYPE_SATURATING (type);
1376 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1377 t = build_fixed (type, result);
1378 /* Propagate overflow flags. */
1379 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1380 TREE_OVERFLOW (t) = 1;
1381 return t;
1382 }
1383
1384 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1385 {
1386 tree type = TREE_TYPE (arg1);
1387 tree r1 = TREE_REALPART (arg1);
1388 tree i1 = TREE_IMAGPART (arg1);
1389 tree r2 = TREE_REALPART (arg2);
1390 tree i2 = TREE_IMAGPART (arg2);
1391 tree real, imag;
1392
1393 switch (code)
1394 {
1395 case PLUS_EXPR:
1396 case MINUS_EXPR:
1397 real = const_binop (code, r1, r2);
1398 imag = const_binop (code, i1, i2);
1399 break;
1400
1401 case MULT_EXPR:
1402 if (COMPLEX_FLOAT_TYPE_P (type))
1403 return do_mpc_arg2 (arg1, arg2, type,
1404 /* do_nonfinite= */ folding_initializer,
1405 mpc_mul);
1406
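/* (r1 + i1*i) * (r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */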
1407 real = const_binop (MINUS_EXPR,
1408 const_binop (MULT_EXPR, r1, r2),
1409 const_binop (MULT_EXPR, i1, i2));
1410 imag = const_binop (PLUS_EXPR,
1411 const_binop (MULT_EXPR, r1, i2),
1412 const_binop (MULT_EXPR, i1, r2));
1413 break;
1414
1415 case RDIV_EXPR:
1416 if (COMPLEX_FLOAT_TYPE_P (type))
1417 return do_mpc_arg2 (arg1, arg2, type,
1418 /* do_nonfinite= */ folding_initializer,
1419 mpc_div);
1420 /* Fallthru. */
1421 case TRUNC_DIV_EXPR:
1422 case CEIL_DIV_EXPR:
1423 case FLOOR_DIV_EXPR:
1424 case ROUND_DIV_EXPR:
1425 if (flag_complex_method == 0)
1426 {
1427 /* Keep this algorithm in sync with
1428 tree-complex.c:expand_complex_div_straight().
1429
1430 Expand complex division to scalars, straightforward algorithm.
1431 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1432 t = br*br + bi*bi
1433 */
1434 tree magsquared
1435 = const_binop (PLUS_EXPR,
1436 const_binop (MULT_EXPR, r2, r2),
1437 const_binop (MULT_EXPR, i2, i2));
1438 tree t1
1439 = const_binop (PLUS_EXPR,
1440 const_binop (MULT_EXPR, r1, r2),
1441 const_binop (MULT_EXPR, i1, i2));
1442 tree t2
1443 = const_binop (MINUS_EXPR,
1444 const_binop (MULT_EXPR, i1, r2),
1445 const_binop (MULT_EXPR, r1, i2));
1446
1447 real = const_binop (code, t1, magsquared);
1448 imag = const_binop (code, t2, magsquared);
1449 }
1450 else
1451 {
1452 /* Keep this algorithm in sync with
1453 tree-complex.c:expand_complex_div_wide().
1454
1455 Expand complex division to scalars, modified algorithm to minimize
1456 overflow with wide input ranges. */
1457 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1458 fold_abs_const (r2, TREE_TYPE (type)),
1459 fold_abs_const (i2, TREE_TYPE (type)));
1460
1461 if (integer_nonzerop (compare))
1462 {
1463 /* In the TRUE branch, we compute
1464 ratio = br/bi;
1465 div = (br * ratio) + bi;
1466 tr = (ar * ratio) + ai;
1467 ti = (ai * ratio) - ar;
1468 tr = tr / div;
1469 ti = ti / div; */
1470 tree ratio = const_binop (code, r2, i2);
1471 tree div = const_binop (PLUS_EXPR, i2,
1472 const_binop (MULT_EXPR, r2, ratio));
1473 real = const_binop (MULT_EXPR, r1, ratio);
1474 real = const_binop (PLUS_EXPR, real, i1);
1475 real = const_binop (code, real, div);
1476
1477 imag = const_binop (MULT_EXPR, i1, ratio);
1478 imag = const_binop (MINUS_EXPR, imag, r1);
1479 imag = const_binop (code, imag, div);
1480 }
1481 else
1482 {
1483 /* In the FALSE branch, we compute
1484 ratio = bi/br;
1485 div = (bi * ratio) + br;
1486 tr = (ai * ratio) + ar;
1487 ti = ai - (ar * ratio);
1488 tr = tr / div;
1489 ti = ti / div; */
1490 tree ratio = const_binop (code, i2, r2);
1491 tree div = const_binop (PLUS_EXPR, r2,
1492 const_binop (MULT_EXPR, i2, ratio));
1493
1494 real = const_binop (MULT_EXPR, i1, ratio);
1495 real = const_binop (PLUS_EXPR, real, r1);
1496 real = const_binop (code, real, div);
1497
1498 imag = const_binop (MULT_EXPR, r1, ratio);
1499 imag = const_binop (MINUS_EXPR, i1, imag);
1500 imag = const_binop (code, imag, div);
1501 }
1502 }
1503 break;
1504
1505 default:
1506 return NULL_TREE;
1507 }
1508
1509 if (real && imag)
1510 return build_complex (type, real, imag);
1511 }
1512
1513 if (TREE_CODE (arg1) == VECTOR_CST
1514 && TREE_CODE (arg2) == VECTOR_CST
1515 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1516 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1517 {
1518 tree type = TREE_TYPE (arg1);
1519 bool step_ok_p;
1520 if (VECTOR_CST_STEPPED_P (arg1)
1521 && VECTOR_CST_STEPPED_P (arg2))
1522 /* We can operate directly on the encoding if:
1523
1524 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1525 implies
1526 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1527
1528 Addition and subtraction are the supported operators
1529 for which this is true. */
1530 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1531 else if (VECTOR_CST_STEPPED_P (arg1))
1532 /* We can operate directly on stepped encodings if:
1533
1534 a3 - a2 == a2 - a1
1535 implies:
1536 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1537
1538 which is true if (x -> x op c) distributes over addition. */
1539 step_ok_p = distributes_over_addition_p (code, 1);
1540 else
1541 /* Similarly in reverse. */
1542 step_ok_p = distributes_over_addition_p (code, 2);
1543 tree_vector_builder elts;
1544 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1545 return NULL_TREE;
1546 unsigned int count = elts.encoded_nelts ();
1547 for (unsigned int i = 0; i < count; ++i)
1548 {
1549 tree elem1 = VECTOR_CST_ELT (arg1, i);
1550 tree elem2 = VECTOR_CST_ELT (arg2, i);
1551
1552 tree elt = const_binop (code, elem1, elem2);
1553
1554 /* It is possible that const_binop cannot handle the given
1555 code and returns NULL_TREE. */
1556 if (elt == NULL_TREE)
1557 return NULL_TREE;
1558 elts.quick_push (elt);
1559 }
1560
1561 return elts.build ();
1562 }
1563
1564 /* Shifts allow a scalar offset for a vector. */
1565 if (TREE_CODE (arg1) == VECTOR_CST
1566 && TREE_CODE (arg2) == INTEGER_CST)
1567 {
1568 tree type = TREE_TYPE (arg1);
1569 bool step_ok_p = distributes_over_addition_p (code, 1);
1570 tree_vector_builder elts;
1571 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1572 return NULL_TREE;
1573 unsigned int count = elts.encoded_nelts ();
1574 for (unsigned int i = 0; i < count; ++i)
1575 {
1576 tree elem1 = VECTOR_CST_ELT (arg1, i);
1577
1578 tree elt = const_binop (code, elem1, arg2);
1579
1580 /* It is possible that const_binop cannot handle the given
1581 code and returns NULL_TREE. */
1582 if (elt == NULL_TREE)
1583 return NULL_TREE;
1584 elts.quick_push (elt);
1585 }
1586
1587 return elts.build ();
1588 }
1589 return NULL_TREE;
1590 }
1591
1592 /* Overload that adds a TYPE parameter to be able to dispatch
1593 to fold_relational_const. */
1594
1595 tree
1596 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1597 {
1598 if (TREE_CODE_CLASS (code) == tcc_comparison)
1599 return fold_relational_const (code, type, arg1, arg2);
1600
1601 /* ??? Until we make the const_binop worker take the type of the
1602 result as an argument, put those cases that need it here. */
1603 switch (code)
1604 {
1605 case VEC_SERIES_EXPR:
1606 if (CONSTANT_CLASS_P (arg1)
1607 && CONSTANT_CLASS_P (arg2))
1608 return build_vec_series (type, arg1, arg2);
1609 return NULL_TREE;
1610
1611 case COMPLEX_EXPR:
1612 if ((TREE_CODE (arg1) == REAL_CST
1613 && TREE_CODE (arg2) == REAL_CST)
1614 || (TREE_CODE (arg1) == INTEGER_CST
1615 && TREE_CODE (arg2) == INTEGER_CST))
1616 return build_complex (type, arg1, arg2);
1617 return NULL_TREE;
1618
1619 case POINTER_DIFF_EXPR:
1620 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1621 {
1622 poly_offset_int res = (wi::to_poly_offset (arg1)
1623 - wi::to_poly_offset (arg2));
1624 return force_fit_type (type, res, 1,
1625 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1626 }
1627 return NULL_TREE;
1628
1629 case VEC_PACK_TRUNC_EXPR:
1630 case VEC_PACK_FIX_TRUNC_EXPR:
1631 case VEC_PACK_FLOAT_EXPR:
1632 {
1633 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1634
1635 if (TREE_CODE (arg1) != VECTOR_CST
1636 || TREE_CODE (arg2) != VECTOR_CST)
1637 return NULL_TREE;
1638
1639 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1640 return NULL_TREE;
1641
1642 out_nelts = in_nelts * 2;
1643 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1644 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1645
1646 tree_vector_builder elts (type, out_nelts, 1);
1647 for (i = 0; i < out_nelts; i++)
1648 {
1649 tree elt = (i < in_nelts
1650 ? VECTOR_CST_ELT (arg1, i)
1651 : VECTOR_CST_ELT (arg2, i - in_nelts));
1652 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1653 ? NOP_EXPR
1654 : code == VEC_PACK_FLOAT_EXPR
1655 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1656 TREE_TYPE (type), elt);
1657 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1658 return NULL_TREE;
1659 elts.quick_push (elt);
1660 }
1661
1662 return elts.build ();
1663 }
1664
1665 case VEC_WIDEN_MULT_LO_EXPR:
1666 case VEC_WIDEN_MULT_HI_EXPR:
1667 case VEC_WIDEN_MULT_EVEN_EXPR:
1668 case VEC_WIDEN_MULT_ODD_EXPR:
1669 {
1670 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1671
1672 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1673 return NULL_TREE;
1674
1675 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1676 return NULL_TREE;
1677 out_nelts = in_nelts / 2;
1678 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1679 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1680
1681 if (code == VEC_WIDEN_MULT_LO_EXPR)
1682 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1683 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1684 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1685 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1686 scale = 1, ofs = 0;
1687 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1688 scale = 1, ofs = 1;
1689
1690 tree_vector_builder elts (type, out_nelts, 1);
1691 for (out = 0; out < out_nelts; out++)
1692 {
1693 unsigned int in = (out << scale) + ofs;
1694 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1695 VECTOR_CST_ELT (arg1, in));
1696 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1697 VECTOR_CST_ELT (arg2, in));
1698
1699 if (t1 == NULL_TREE || t2 == NULL_TREE)
1700 return NULL_TREE;
1701 tree elt = const_binop (MULT_EXPR, t1, t2);
1702 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1703 return NULL_TREE;
1704 elts.quick_push (elt);
1705 }
1706
1707 return elts.build ();
1708 }
1709
1710 default:;
1711 }
1712
1713 if (TREE_CODE_CLASS (code) != tcc_binary)
1714 return NULL_TREE;
1715
1716 /* Make sure type and arg0 have the same saturating flag. */
1717 gcc_checking_assert (TYPE_SATURATING (type)
1718 == TYPE_SATURATING (TREE_TYPE (arg1)));
1719
1720 return const_binop (code, arg1, arg2);
1721 }
1722
1723 /* Compute CODE ARG0 with resulting type TYPE, where ARG0 is constant.
1724 Return zero if computing the constants is not possible. */
1725
1726 tree
1727 const_unop (enum tree_code code, tree type, tree arg0)
1728 {
1729 /* Don't perform the operation, other than NEGATE and ABS, if
1730 flag_signaling_nans is on and the operand is a signaling NaN. */
1731 if (TREE_CODE (arg0) == REAL_CST
1732 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1733 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1734 && code != NEGATE_EXPR
1735 && code != ABS_EXPR
1736 && code != ABSU_EXPR)
1737 return NULL_TREE;
1738
1739 switch (code)
1740 {
1741 CASE_CONVERT:
1742 case FLOAT_EXPR:
1743 case FIX_TRUNC_EXPR:
1744 case FIXED_CONVERT_EXPR:
1745 return fold_convert_const (code, type, arg0);
1746
1747 case ADDR_SPACE_CONVERT_EXPR:
1748 /* If the source address is 0, and the source address space
1749 cannot have a valid object at 0, fold to dest type null. */
1750 if (integer_zerop (arg0)
1751 && !(targetm.addr_space.zero_address_valid
1752 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1753 return fold_convert_const (code, type, arg0);
1754 break;
1755
1756 case VIEW_CONVERT_EXPR:
1757 return fold_view_convert_expr (type, arg0);
1758
1759 case NEGATE_EXPR:
1760 {
1761 /* Can't call fold_negate_const directly here as that doesn't
1762 handle all cases and we might not be able to negate some
1763 constants. */
1764 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1765 if (tem && CONSTANT_CLASS_P (tem))
1766 return tem;
1767 break;
1768 }
1769
1770 case ABS_EXPR:
1771 case ABSU_EXPR:
1772 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1773 return fold_abs_const (arg0, type);
1774 break;
1775
1776 case CONJ_EXPR:
1777 if (TREE_CODE (arg0) == COMPLEX_CST)
1778 {
1779 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1780 TREE_TYPE (type));
1781 return build_complex (type, TREE_REALPART (arg0), ipart);
1782 }
1783 break;
1784
1785 case BIT_NOT_EXPR:
1786 if (TREE_CODE (arg0) == INTEGER_CST)
1787 return fold_not_const (arg0, type);
1788 else if (POLY_INT_CST_P (arg0))
1789 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1790 /* Perform BIT_NOT_EXPR on each element individually. */
1791 else if (TREE_CODE (arg0) == VECTOR_CST)
1792 {
1793 tree elem;
1794
1795 /* This can cope with stepped encodings because ~x == -1 - x. */
1796 tree_vector_builder elements;
1797 elements.new_unary_operation (type, arg0, true);
1798 unsigned int i, count = elements.encoded_nelts ();
1799 for (i = 0; i < count; ++i)
1800 {
1801 elem = VECTOR_CST_ELT (arg0, i);
1802 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1803 if (elem == NULL_TREE)
1804 break;
1805 elements.quick_push (elem);
1806 }
1807 if (i == count)
1808 return elements.build ();
1809 }
1810 break;
1811
1812 case TRUTH_NOT_EXPR:
1813 if (TREE_CODE (arg0) == INTEGER_CST)
1814 return constant_boolean_node (integer_zerop (arg0), type);
1815 break;
1816
1817 case REALPART_EXPR:
1818 if (TREE_CODE (arg0) == COMPLEX_CST)
1819 return fold_convert (type, TREE_REALPART (arg0));
1820 break;
1821
1822 case IMAGPART_EXPR:
1823 if (TREE_CODE (arg0) == COMPLEX_CST)
1824 return fold_convert (type, TREE_IMAGPART (arg0));
1825 break;
1826
1827 case VEC_UNPACK_LO_EXPR:
1828 case VEC_UNPACK_HI_EXPR:
1829 case VEC_UNPACK_FLOAT_LO_EXPR:
1830 case VEC_UNPACK_FLOAT_HI_EXPR:
1831 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1832 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1833 {
1834 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1835 enum tree_code subcode;
1836
1837 if (TREE_CODE (arg0) != VECTOR_CST)
1838 return NULL_TREE;
1839
1840 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1841 return NULL_TREE;
1842 out_nelts = in_nelts / 2;
1843 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1844
1845 unsigned int offset = 0;
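/* Which half of the input the _LO/_HI variants select is endian-dependent:
   on little-endian targets the _HI forms take the high-numbered input
   elements, on big-endian targets the _LO forms do.  */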
1846 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1847 || code == VEC_UNPACK_FLOAT_LO_EXPR
1848 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1849 offset = out_nelts;
1850
1851 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1852 subcode = NOP_EXPR;
1853 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1854 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1855 subcode = FLOAT_EXPR;
1856 else
1857 subcode = FIX_TRUNC_EXPR;
1858
1859 tree_vector_builder elts (type, out_nelts, 1);
1860 for (i = 0; i < out_nelts; i++)
1861 {
1862 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1863 VECTOR_CST_ELT (arg0, i + offset));
1864 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1865 return NULL_TREE;
1866 elts.quick_push (elt);
1867 }
1868
1869 return elts.build ();
1870 }
1871
1872 case VEC_DUPLICATE_EXPR:
1873 if (CONSTANT_CLASS_P (arg0))
1874 return build_vector_from_val (type, arg0);
1875 return NULL_TREE;
1876
1877 default:
1878 break;
1879 }
1880
1881 return NULL_TREE;
1882 }
1883
1884 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1885 indicates which particular sizetype to create. */
1886
1887 tree
1888 size_int_kind (poly_int64 number, enum size_type_kind kind)
1889 {
1890 return build_int_cst (sizetype_tab[(int) kind], number);
1891 }
1892 \f
1893 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1894 is a tree code. The type of the result is taken from the operands.
1895 Both must be equivalent integer types, a la int_binop_types_match_p.
1896 If the operands are constant, so is the result. */
1897
1898 tree
1899 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1900 {
1901 tree type = TREE_TYPE (arg0);
1902
1903 if (arg0 == error_mark_node || arg1 == error_mark_node)
1904 return error_mark_node;
1905
1906 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1907 TREE_TYPE (arg1)));
1908
1909 /* Handle the special case of two poly_int constants faster. */
1910 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1911 {
1912 /* And some specific cases even faster than that. */
1913 if (code == PLUS_EXPR)
1914 {
1915 if (integer_zerop (arg0)
1916 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1917 return arg1;
1918 if (integer_zerop (arg1)
1919 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1920 return arg0;
1921 }
1922 else if (code == MINUS_EXPR)
1923 {
1924 if (integer_zerop (arg1)
1925 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1926 return arg0;
1927 }
1928 else if (code == MULT_EXPR)
1929 {
1930 if (integer_onep (arg0)
1931 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1932 return arg1;
1933 }
1934
1935 /* Handle general case of two integer constants. For sizetype
1936 constant calculations we always want to know about overflow,
1937 even in the unsigned case. */
1938 tree res = int_const_binop (code, arg0, arg1, -1);
1939 if (res != NULL_TREE)
1940 return res;
1941 }
1942
1943 return fold_build2_loc (loc, code, type, arg0, arg1);
1944 }
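
/* Illustrative sketch, not part of the original file: a typical caller
   combines two sizetype values through the size_binop wrapper macro.
   Assuming OFF and LEN are existing sizetype trees:

     tree end = size_binop (PLUS_EXPR, off, len);

   When both operands are constants the result is folded immediately via
   int_const_binop above; otherwise a PLUS_EXPR tree is built.  Mixing
   sizetype and bitsizetype operands trips the int_binop_types_match_p
   assertion.  */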
1945
1946 /* Given two values, either both of sizetype or both of bitsizetype,
1947 compute the difference between the two values. Return the value
1948 in the signed type corresponding to the type of the operands. */
1949
1950 tree
1951 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1952 {
1953 tree type = TREE_TYPE (arg0);
1954 tree ctype;
1955
1956 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1957 TREE_TYPE (arg1)));
1958
1959 /* If the type is already signed, just do the simple thing. */
1960 if (!TYPE_UNSIGNED (type))
1961 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1962
1963 if (type == sizetype)
1964 ctype = ssizetype;
1965 else if (type == bitsizetype)
1966 ctype = sbitsizetype;
1967 else
1968 ctype = signed_type_for (type);
1969
1970 /* If either operand is not a constant, do the conversions to the signed
1971 type and subtract. The hardware will do the right thing with any
1972 overflow in the subtraction. */
1973 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1974 return size_binop_loc (loc, MINUS_EXPR,
1975 fold_convert_loc (loc, ctype, arg0),
1976 fold_convert_loc (loc, ctype, arg1));
1977
1978 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1979 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1980 overflow) and negate (which can't either). Special-case a result
1981 of zero while we're here. */
1982 if (tree_int_cst_equal (arg0, arg1))
1983 return build_int_cst (ctype, 0);
1984 else if (tree_int_cst_lt (arg1, arg0))
1985 return fold_convert_loc (loc, ctype,
1986 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1987 else
1988 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1989 fold_convert_loc (loc, ctype,
1990 size_binop_loc (loc,
1991 MINUS_EXPR,
1992 arg1, arg0)));
1993 }
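
/* Illustrative sketch, not part of the original file: size_diffop is the
   usual entry point, and for unsigned sizetype operands the result is
   produced in the corresponding signed type, e.g.

     tree d = size_diffop (size_int (8), size_int (12));

   yields the ssizetype constant -4 rather than a huge unsigned value.
   The larger-vs-smaller handling above is what keeps the intermediate
   subtraction from overflowing.  */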
1994 \f
1995 /* A subroutine of fold_convert_const handling conversions of an
1996 INTEGER_CST to another integer type. */
1997
1998 static tree
1999 fold_convert_const_int_from_int (tree type, const_tree arg1)
2000 {
2001 /* Given an integer constant, make new constant with new type,
2002 appropriately sign-extended or truncated. Use widest_int
2003 so that any extension is done according to ARG1's type. */
2004 return force_fit_type (type, wi::to_widest (arg1),
2005 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2006 TREE_OVERFLOW (arg1));
2007 }
2008
2009 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2010 to an integer type. */
2011
2012 static tree
2013 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2014 {
2015 bool overflow = false;
2016 tree t;
2017
2018 /* The following code implements the floating point to integer
2019 conversion rules required by the Java Language Specification,
2020 that IEEE NaNs are mapped to zero and values that overflow
2021 the target precision saturate, i.e. values greater than
2022 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2023 are mapped to INT_MIN. These semantics are allowed by the
2024 C and C++ standards that simply state that the behavior of
2025 FP-to-integer conversion is unspecified upon overflow. */
2026
2027 wide_int val;
2028 REAL_VALUE_TYPE r;
2029 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2030
2031 switch (code)
2032 {
2033 case FIX_TRUNC_EXPR:
2034 real_trunc (&r, VOIDmode, &x);
2035 break;
2036
2037 default:
2038 gcc_unreachable ();
2039 }
2040
2041 /* If R is NaN, return zero and show we have an overflow. */
2042 if (REAL_VALUE_ISNAN (r))
2043 {
2044 overflow = true;
2045 val = wi::zero (TYPE_PRECISION (type));
2046 }
2047
2048 /* See if R is less than the lower bound or greater than the
2049 upper bound. */
2050
2051 if (! overflow)
2052 {
2053 tree lt = TYPE_MIN_VALUE (type);
2054 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2055 if (real_less (&r, &l))
2056 {
2057 overflow = true;
2058 val = wi::to_wide (lt);
2059 }
2060 }
2061
2062 if (! overflow)
2063 {
2064 tree ut = TYPE_MAX_VALUE (type);
2065 if (ut)
2066 {
2067 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2068 if (real_less (&u, &r))
2069 {
2070 overflow = true;
2071 val = wi::to_wide (ut);
2072 }
2073 }
2074 }
2075
2076 if (! overflow)
2077 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2078
2079 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2080 return t;
2081 }
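
/* Illustrative sketch, not part of the original file, of the saturating
   behaviour documented above for a 32-bit signed target type:

     (int) 1.0e30   ->  2147483647 (INT_MAX), TREE_OVERFLOW set
     (int) -1.0e30  -> -2147483648 (INT_MIN), TREE_OVERFLOW set
     (int) NaN      ->  0,                    TREE_OVERFLOW set
     (int) 3.75     ->  3,                    no overflow

   The overflow flag on the result lets callers notice that the
   conversion was not value-preserving.  */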
2082
2083 /* A subroutine of fold_convert_const handling conversions of a
2084 FIXED_CST to an integer type. */
2085
2086 static tree
2087 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2088 {
2089 tree t;
2090 double_int temp, temp_trunc;
2091 scalar_mode mode;
2092
2093 /* Right shift FIXED_CST to temp by fbit. */
2094 temp = TREE_FIXED_CST (arg1).data;
2095 mode = TREE_FIXED_CST (arg1).mode;
2096 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2097 {
2098 temp = temp.rshift (GET_MODE_FBIT (mode),
2099 HOST_BITS_PER_DOUBLE_INT,
2100 SIGNED_FIXED_POINT_MODE_P (mode));
2101
2102 /* Left shift temp to temp_trunc by fbit. */
2103 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2104 HOST_BITS_PER_DOUBLE_INT,
2105 SIGNED_FIXED_POINT_MODE_P (mode));
2106 }
2107 else
2108 {
2109 temp = double_int_zero;
2110 temp_trunc = double_int_zero;
2111 }
2112
2113 /* If FIXED_CST is negative, we need to round the value toward 0.
2114 We do that by adding 1 to temp when the fractional bits are not all zero. */
2115 if (SIGNED_FIXED_POINT_MODE_P (mode)
2116 && temp_trunc.is_negative ()
2117 && TREE_FIXED_CST (arg1).data != temp_trunc)
2118 temp += double_int_one;
2119
2120 /* Given a fixed-point constant, make new constant with new type,
2121 appropriately sign-extended or truncated. */
2122 t = force_fit_type (type, temp, -1,
2123 (temp.is_negative ()
2124 && (TYPE_UNSIGNED (type)
2125 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2126 | TREE_OVERFLOW (arg1));
2127
2128 return t;
2129 }
2130
2131 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2132 to another floating point type. */
2133
2134 static tree
2135 fold_convert_const_real_from_real (tree type, const_tree arg1)
2136 {
2137 REAL_VALUE_TYPE value;
2138 tree t;
2139
2140 /* Don't perform the operation if flag_signaling_nans is on
2141 and the operand is a signaling NaN. */
2142 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2143 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2144 return NULL_TREE;
2145
2146 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2147 t = build_real (type, value);
2148
2149 /* If converting an infinity or NAN to a representation that doesn't
2150 have one, set the overflow bit so that we can produce some kind of
2151 error message at the appropriate point if necessary. It's not the
2152 most user-friendly message, but it's better than nothing. */
2153 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2154 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2155 TREE_OVERFLOW (t) = 1;
2156 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2157 && !MODE_HAS_NANS (TYPE_MODE (type)))
2158 TREE_OVERFLOW (t) = 1;
2159 /* Regular overflow: the conversion produced an infinity in a mode that
2160 can't represent infinities. */
2161 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2162 && REAL_VALUE_ISINF (value)
2163 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2164 TREE_OVERFLOW (t) = 1;
2165 else
2166 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2167 return t;
2168 }
2169
2170 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2171 to a floating point type. */
2172
2173 static tree
2174 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2175 {
2176 REAL_VALUE_TYPE value;
2177 tree t;
2178
2179 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2180 &TREE_FIXED_CST (arg1));
2181 t = build_real (type, value);
2182
2183 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2184 return t;
2185 }
2186
2187 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2188 to another fixed-point type. */
2189
2190 static tree
2191 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2192 {
2193 FIXED_VALUE_TYPE value;
2194 tree t;
2195 bool overflow_p;
2196
2197 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2198 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2199 t = build_fixed (type, value);
2200
2201 /* Propagate overflow flags. */
2202 if (overflow_p | TREE_OVERFLOW (arg1))
2203 TREE_OVERFLOW (t) = 1;
2204 return t;
2205 }
2206
2207 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2208 to a fixed-point type. */
2209
2210 static tree
2211 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2212 {
2213 FIXED_VALUE_TYPE value;
2214 tree t;
2215 bool overflow_p;
2216 double_int di;
2217
2218 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2219
2220 di.low = TREE_INT_CST_ELT (arg1, 0);
2221 if (TREE_INT_CST_NUNITS (arg1) == 1)
2222 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2223 else
2224 di.high = TREE_INT_CST_ELT (arg1, 1);
2225
2226 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2227 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2228 TYPE_SATURATING (type));
2229 t = build_fixed (type, value);
2230
2231 /* Propagate overflow flags. */
2232 if (overflow_p | TREE_OVERFLOW (arg1))
2233 TREE_OVERFLOW (t) = 1;
2234 return t;
2235 }
2236
2237 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2238 to a fixed-point type. */
2239
2240 static tree
2241 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2242 {
2243 FIXED_VALUE_TYPE value;
2244 tree t;
2245 bool overflow_p;
2246
2247 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2248 &TREE_REAL_CST (arg1),
2249 TYPE_SATURATING (type));
2250 t = build_fixed (type, value);
2251
2252 /* Propagate overflow flags. */
2253 if (overflow_p | TREE_OVERFLOW (arg1))
2254 TREE_OVERFLOW (t) = 1;
2255 return t;
2256 }
2257
2258 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2259 type TYPE. If no simplification can be done return NULL_TREE. */
2260
2261 static tree
2262 fold_convert_const (enum tree_code code, tree type, tree arg1)
2263 {
2264 tree arg_type = TREE_TYPE (arg1);
2265 if (arg_type == type)
2266 return arg1;
2267
2268 /* We can't widen types, since the runtime value could overflow the
2269 original type before being extended to the new type. */
2270 if (POLY_INT_CST_P (arg1)
2271 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2272 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2273 return build_poly_int_cst (type,
2274 poly_wide_int::from (poly_int_cst_value (arg1),
2275 TYPE_PRECISION (type),
2276 TYPE_SIGN (arg_type)));
2277
2278 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2279 || TREE_CODE (type) == OFFSET_TYPE)
2280 {
2281 if (TREE_CODE (arg1) == INTEGER_CST)
2282 return fold_convert_const_int_from_int (type, arg1);
2283 else if (TREE_CODE (arg1) == REAL_CST)
2284 return fold_convert_const_int_from_real (code, type, arg1);
2285 else if (TREE_CODE (arg1) == FIXED_CST)
2286 return fold_convert_const_int_from_fixed (type, arg1);
2287 }
2288 else if (TREE_CODE (type) == REAL_TYPE)
2289 {
2290 if (TREE_CODE (arg1) == INTEGER_CST)
2291 return build_real_from_int_cst (type, arg1);
2292 else if (TREE_CODE (arg1) == REAL_CST)
2293 return fold_convert_const_real_from_real (type, arg1);
2294 else if (TREE_CODE (arg1) == FIXED_CST)
2295 return fold_convert_const_real_from_fixed (type, arg1);
2296 }
2297 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2298 {
2299 if (TREE_CODE (arg1) == FIXED_CST)
2300 return fold_convert_const_fixed_from_fixed (type, arg1);
2301 else if (TREE_CODE (arg1) == INTEGER_CST)
2302 return fold_convert_const_fixed_from_int (type, arg1);
2303 else if (TREE_CODE (arg1) == REAL_CST)
2304 return fold_convert_const_fixed_from_real (type, arg1);
2305 }
2306 else if (TREE_CODE (type) == VECTOR_TYPE)
2307 {
2308 if (TREE_CODE (arg1) == VECTOR_CST
2309 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2310 {
2311 tree elttype = TREE_TYPE (type);
2312 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2313 /* We can't handle steps directly when extending, since the
2314 values need to wrap at the original precision first. */
2315 bool step_ok_p
2316 = (INTEGRAL_TYPE_P (elttype)
2317 && INTEGRAL_TYPE_P (arg1_elttype)
2318 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2319 tree_vector_builder v;
2320 if (!v.new_unary_operation (type, arg1, step_ok_p))
2321 return NULL_TREE;
2322 unsigned int len = v.encoded_nelts ();
2323 for (unsigned int i = 0; i < len; ++i)
2324 {
2325 tree elt = VECTOR_CST_ELT (arg1, i);
2326 tree cvt = fold_convert_const (code, elttype, elt);
2327 if (cvt == NULL_TREE)
2328 return NULL_TREE;
2329 v.quick_push (cvt);
2330 }
2331 return v.build ();
2332 }
2333 }
2334 return NULL_TREE;
2335 }
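
/* Illustrative sketch, not part of the original file: this is the
   constant-only companion of fold_convert_loc below.  For instance,
   if C is the INTEGER_CST 42 of type int,

     fold_convert_const (NOP_EXPR, long_integer_type_node, c)

   returns a fresh INTEGER_CST 42 of type long, while a non-constant
   operand simply yields NULL_TREE and leaves folding to the caller.  */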
2336
2337 /* Construct a vector of zero elements of vector type TYPE. */
2338
2339 static tree
2340 build_zero_vector (tree type)
2341 {
2342 tree t;
2343
2344 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2345 return build_vector_from_val (type, t);
2346 }
2347
2348 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2349
2350 bool
2351 fold_convertible_p (const_tree type, const_tree arg)
2352 {
2353 tree orig = TREE_TYPE (arg);
2354
2355 if (type == orig)
2356 return true;
2357
2358 if (TREE_CODE (arg) == ERROR_MARK
2359 || TREE_CODE (type) == ERROR_MARK
2360 || TREE_CODE (orig) == ERROR_MARK)
2361 return false;
2362
2363 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2364 return true;
2365
2366 switch (TREE_CODE (type))
2367 {
2368 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2369 case POINTER_TYPE: case REFERENCE_TYPE:
2370 case OFFSET_TYPE:
2371 return (INTEGRAL_TYPE_P (orig)
2372 || (POINTER_TYPE_P (orig)
2373 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2374 || TREE_CODE (orig) == OFFSET_TYPE);
2375
2376 case REAL_TYPE:
2377 case FIXED_POINT_TYPE:
2378 case VOID_TYPE:
2379 return TREE_CODE (type) == TREE_CODE (orig);
2380
2381 case VECTOR_TYPE:
2382 return (VECTOR_TYPE_P (orig)
2383 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2384 TYPE_VECTOR_SUBPARTS (orig))
2385 && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2386
2387 default:
2388 return false;
2389 }
2390 }
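
/* Illustrative sketch, not part of the original file:

     fold_convertible_p (long_integer_type_node, integer_zero_node)

   is true, since any integral type NOP-converts to another integral
   type, whereas converting from a pointer is only accepted when the
   destination type is no wider than the pointer, and vectors
   additionally require matching element counts.  */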
2391
2392 /* Convert expression ARG to type TYPE. Used by the middle-end for
2393 simple conversions in preference to calling the front-end's convert. */
2394
2395 tree
2396 fold_convert_loc (location_t loc, tree type, tree arg)
2397 {
2398 tree orig = TREE_TYPE (arg);
2399 tree tem;
2400
2401 if (type == orig)
2402 return arg;
2403
2404 if (TREE_CODE (arg) == ERROR_MARK
2405 || TREE_CODE (type) == ERROR_MARK
2406 || TREE_CODE (orig) == ERROR_MARK)
2407 return error_mark_node;
2408
2409 switch (TREE_CODE (type))
2410 {
2411 case POINTER_TYPE:
2412 case REFERENCE_TYPE:
2413 /* Handle conversions between pointers to different address spaces. */
2414 if (POINTER_TYPE_P (orig)
2415 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2416 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2417 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2418 /* fall through */
2419
2420 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2421 case OFFSET_TYPE:
2422 if (TREE_CODE (arg) == INTEGER_CST)
2423 {
2424 tem = fold_convert_const (NOP_EXPR, type, arg);
2425 if (tem != NULL_TREE)
2426 return tem;
2427 }
2428 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2429 || TREE_CODE (orig) == OFFSET_TYPE)
2430 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2431 if (TREE_CODE (orig) == COMPLEX_TYPE)
2432 return fold_convert_loc (loc, type,
2433 fold_build1_loc (loc, REALPART_EXPR,
2434 TREE_TYPE (orig), arg));
2435 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2436 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2437 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2438
2439 case REAL_TYPE:
2440 if (TREE_CODE (arg) == INTEGER_CST)
2441 {
2442 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2443 if (tem != NULL_TREE)
2444 return tem;
2445 }
2446 else if (TREE_CODE (arg) == REAL_CST)
2447 {
2448 tem = fold_convert_const (NOP_EXPR, type, arg);
2449 if (tem != NULL_TREE)
2450 return tem;
2451 }
2452 else if (TREE_CODE (arg) == FIXED_CST)
2453 {
2454 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2455 if (tem != NULL_TREE)
2456 return tem;
2457 }
2458
2459 switch (TREE_CODE (orig))
2460 {
2461 case INTEGER_TYPE:
2462 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2463 case POINTER_TYPE: case REFERENCE_TYPE:
2464 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2465
2466 case REAL_TYPE:
2467 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2468
2469 case FIXED_POINT_TYPE:
2470 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2471
2472 case COMPLEX_TYPE:
2473 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2474 return fold_convert_loc (loc, type, tem);
2475
2476 default:
2477 gcc_unreachable ();
2478 }
2479
2480 case FIXED_POINT_TYPE:
2481 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2482 || TREE_CODE (arg) == REAL_CST)
2483 {
2484 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2485 if (tem != NULL_TREE)
2486 goto fold_convert_exit;
2487 }
2488
2489 switch (TREE_CODE (orig))
2490 {
2491 case FIXED_POINT_TYPE:
2492 case INTEGER_TYPE:
2493 case ENUMERAL_TYPE:
2494 case BOOLEAN_TYPE:
2495 case REAL_TYPE:
2496 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2497
2498 case COMPLEX_TYPE:
2499 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2500 return fold_convert_loc (loc, type, tem);
2501
2502 default:
2503 gcc_unreachable ();
2504 }
2505
2506 case COMPLEX_TYPE:
2507 switch (TREE_CODE (orig))
2508 {
2509 case INTEGER_TYPE:
2510 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2511 case POINTER_TYPE: case REFERENCE_TYPE:
2512 case REAL_TYPE:
2513 case FIXED_POINT_TYPE:
2514 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2515 fold_convert_loc (loc, TREE_TYPE (type), arg),
2516 fold_convert_loc (loc, TREE_TYPE (type),
2517 integer_zero_node));
2518 case COMPLEX_TYPE:
2519 {
2520 tree rpart, ipart;
2521
2522 if (TREE_CODE (arg) == COMPLEX_EXPR)
2523 {
2524 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2525 TREE_OPERAND (arg, 0));
2526 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2527 TREE_OPERAND (arg, 1));
2528 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2529 }
2530
2531 arg = save_expr (arg);
2532 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2533 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2534 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2535 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2536 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2537 }
2538
2539 default:
2540 gcc_unreachable ();
2541 }
2542
2543 case VECTOR_TYPE:
2544 if (integer_zerop (arg))
2545 return build_zero_vector (type);
2546 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2547 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2548 || TREE_CODE (orig) == VECTOR_TYPE);
2549 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2550
2551 case VOID_TYPE:
2552 tem = fold_ignored_result (arg);
2553 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2554
2555 default:
2556 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2557 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2558 gcc_unreachable ();
2559 }
2560 fold_convert_exit:
2561 protected_set_expr_location_unshare (tem, loc);
2562 return tem;
2563 }
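
/* Illustrative sketch, not part of the original file: the usual entry
   point is the fold_convert macro, which supplies UNKNOWN_LOCATION, e.g.

     tree t = fold_convert (double_type_node, integer_one_node);

   folds directly to the REAL_CST 1.0 through fold_convert_const above,
   whereas a non-constant integer operand is wrapped in a FLOAT_EXPR
   instead.  */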
2564 \f
2565 /* Return false if expr can be assumed not to be an lvalue, true
2566 otherwise. */
2567
2568 static bool
2569 maybe_lvalue_p (const_tree x)
2570 {
2571 /* We only need to wrap lvalue tree codes. */
2572 switch (TREE_CODE (x))
2573 {
2574 case VAR_DECL:
2575 case PARM_DECL:
2576 case RESULT_DECL:
2577 case LABEL_DECL:
2578 case FUNCTION_DECL:
2579 case SSA_NAME:
2580
2581 case COMPONENT_REF:
2582 case MEM_REF:
2583 case INDIRECT_REF:
2584 case ARRAY_REF:
2585 case ARRAY_RANGE_REF:
2586 case BIT_FIELD_REF:
2587 case OBJ_TYPE_REF:
2588
2589 case REALPART_EXPR:
2590 case IMAGPART_EXPR:
2591 case PREINCREMENT_EXPR:
2592 case PREDECREMENT_EXPR:
2593 case SAVE_EXPR:
2594 case TRY_CATCH_EXPR:
2595 case WITH_CLEANUP_EXPR:
2596 case COMPOUND_EXPR:
2597 case MODIFY_EXPR:
2598 case TARGET_EXPR:
2599 case COND_EXPR:
2600 case BIND_EXPR:
2601 case VIEW_CONVERT_EXPR:
2602 break;
2603
2604 default:
2605 /* Assume the worst for front-end tree codes. */
2606 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2607 break;
2608 return false;
2609 }
2610
2611 return true;
2612 }
2613
2614 /* Return an expr equal to X but certainly not valid as an lvalue. */
2615
2616 tree
2617 non_lvalue_loc (location_t loc, tree x)
2618 {
2619 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2620 us. */
2621 if (in_gimple_form)
2622 return x;
2623
2624 if (! maybe_lvalue_p (x))
2625 return x;
2626 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2627 }
2628
2629 /* When pedantic, return an expr equal to X but certainly not valid as a
2630 pedantic lvalue. Otherwise, return X. */
2631
2632 static tree
2633 pedantic_non_lvalue_loc (location_t loc, tree x)
2634 {
2635 return protected_set_expr_location_unshare (x, loc);
2636 }
2637 \f
2638 /* Given a tree comparison code, return the code that is the logical inverse.
2639 It is generally not safe to do this for floating-point comparisons, except
2640 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2641 ERROR_MARK in this case. */
2642
2643 enum tree_code
2644 invert_tree_comparison (enum tree_code code, bool honor_nans)
2645 {
2646 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2647 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2648 return ERROR_MARK;
2649
2650 switch (code)
2651 {
2652 case EQ_EXPR:
2653 return NE_EXPR;
2654 case NE_EXPR:
2655 return EQ_EXPR;
2656 case GT_EXPR:
2657 return honor_nans ? UNLE_EXPR : LE_EXPR;
2658 case GE_EXPR:
2659 return honor_nans ? UNLT_EXPR : LT_EXPR;
2660 case LT_EXPR:
2661 return honor_nans ? UNGE_EXPR : GE_EXPR;
2662 case LE_EXPR:
2663 return honor_nans ? UNGT_EXPR : GT_EXPR;
2664 case LTGT_EXPR:
2665 return UNEQ_EXPR;
2666 case UNEQ_EXPR:
2667 return LTGT_EXPR;
2668 case UNGT_EXPR:
2669 return LE_EXPR;
2670 case UNGE_EXPR:
2671 return LT_EXPR;
2672 case UNLT_EXPR:
2673 return GE_EXPR;
2674 case UNLE_EXPR:
2675 return GT_EXPR;
2676 case ORDERED_EXPR:
2677 return UNORDERED_EXPR;
2678 case UNORDERED_EXPR:
2679 return ORDERED_EXPR;
2680 default:
2681 gcc_unreachable ();
2682 }
2683 }
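
/* Illustrative sketch, not part of the original file:

     invert_tree_comparison (LT_EXPR, false)  ->  GE_EXPR
     invert_tree_comparison (LT_EXPR, true)   ->  UNGE_EXPR, or ERROR_MARK
                                                  when flag_trapping_math

   since replacing an ordered < by an unordered >= would change which
   NaN operands raise an invalid-operation exception.  */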
2684
2685 /* Similar, but return the comparison that results if the operands are
2686 swapped. This is safe for floating-point. */
2687
2688 enum tree_code
2689 swap_tree_comparison (enum tree_code code)
2690 {
2691 switch (code)
2692 {
2693 case EQ_EXPR:
2694 case NE_EXPR:
2695 case ORDERED_EXPR:
2696 case UNORDERED_EXPR:
2697 case LTGT_EXPR:
2698 case UNEQ_EXPR:
2699 return code;
2700 case GT_EXPR:
2701 return LT_EXPR;
2702 case GE_EXPR:
2703 return LE_EXPR;
2704 case LT_EXPR:
2705 return GT_EXPR;
2706 case LE_EXPR:
2707 return GE_EXPR;
2708 case UNGT_EXPR:
2709 return UNLT_EXPR;
2710 case UNGE_EXPR:
2711 return UNLE_EXPR;
2712 case UNLT_EXPR:
2713 return UNGT_EXPR;
2714 case UNLE_EXPR:
2715 return UNGE_EXPR;
2716 default:
2717 gcc_unreachable ();
2718 }
2719 }
2720
2721
2722 /* Convert a comparison tree code from an enum tree_code representation
2723 into a compcode bit-based encoding. This function is the inverse of
2724 compcode_to_comparison. */
2725
2726 static enum comparison_code
2727 comparison_to_compcode (enum tree_code code)
2728 {
2729 switch (code)
2730 {
2731 case LT_EXPR:
2732 return COMPCODE_LT;
2733 case EQ_EXPR:
2734 return COMPCODE_EQ;
2735 case LE_EXPR:
2736 return COMPCODE_LE;
2737 case GT_EXPR:
2738 return COMPCODE_GT;
2739 case NE_EXPR:
2740 return COMPCODE_NE;
2741 case GE_EXPR:
2742 return COMPCODE_GE;
2743 case ORDERED_EXPR:
2744 return COMPCODE_ORD;
2745 case UNORDERED_EXPR:
2746 return COMPCODE_UNORD;
2747 case UNLT_EXPR:
2748 return COMPCODE_UNLT;
2749 case UNEQ_EXPR:
2750 return COMPCODE_UNEQ;
2751 case UNLE_EXPR:
2752 return COMPCODE_UNLE;
2753 case UNGT_EXPR:
2754 return COMPCODE_UNGT;
2755 case LTGT_EXPR:
2756 return COMPCODE_LTGT;
2757 case UNGE_EXPR:
2758 return COMPCODE_UNGE;
2759 default:
2760 gcc_unreachable ();
2761 }
2762 }
2763
2764 /* Convert a compcode bit-based encoding of a comparison operator back
2765 to GCC's enum tree_code representation. This function is the
2766 inverse of comparison_to_compcode. */
2767
2768 static enum tree_code
2769 compcode_to_comparison (enum comparison_code code)
2770 {
2771 switch (code)
2772 {
2773 case COMPCODE_LT:
2774 return LT_EXPR;
2775 case COMPCODE_EQ:
2776 return EQ_EXPR;
2777 case COMPCODE_LE:
2778 return LE_EXPR;
2779 case COMPCODE_GT:
2780 return GT_EXPR;
2781 case COMPCODE_NE:
2782 return NE_EXPR;
2783 case COMPCODE_GE:
2784 return GE_EXPR;
2785 case COMPCODE_ORD:
2786 return ORDERED_EXPR;
2787 case COMPCODE_UNORD:
2788 return UNORDERED_EXPR;
2789 case COMPCODE_UNLT:
2790 return UNLT_EXPR;
2791 case COMPCODE_UNEQ:
2792 return UNEQ_EXPR;
2793 case COMPCODE_UNLE:
2794 return UNLE_EXPR;
2795 case COMPCODE_UNGT:
2796 return UNGT_EXPR;
2797 case COMPCODE_LTGT:
2798 return LTGT_EXPR;
2799 case COMPCODE_UNGE:
2800 return UNGE_EXPR;
2801 default:
2802 gcc_unreachable ();
2803 }
2804 }
2805
2806 /* Return true if COND1 tests the opposite condition of COND2. */
2807
2808 bool
2809 inverse_conditions_p (const_tree cond1, const_tree cond2)
2810 {
2811 return (COMPARISON_CLASS_P (cond1)
2812 && COMPARISON_CLASS_P (cond2)
2813 && (invert_tree_comparison
2814 (TREE_CODE (cond1),
2815 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2816 && operand_equal_p (TREE_OPERAND (cond1, 0),
2817 TREE_OPERAND (cond2, 0), 0)
2818 && operand_equal_p (TREE_OPERAND (cond1, 1),
2819 TREE_OPERAND (cond2, 1), 0));
2820 }
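
/* Illustrative sketch, not part of the original file, for comparisons of
   integer operands X and Y:

     inverse_conditions_p (x < y, x >= y)  ->  true
     inverse_conditions_p (x < y, y >= x)  ->  false, the operands must
                                               appear in the same order

   For floating-point operands with NaNs honored, x >= y is not the
   inverse of x < y (UNGE would be), so the result is false there too.  */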
2821
2822 /* Return a tree for the comparison which is the combination of
2823 doing the AND or OR (depending on CODE) of the two operations LCODE
2824 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2825 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2826 if this makes the transformation invalid. */
2827
2828 tree
2829 combine_comparisons (location_t loc,
2830 enum tree_code code, enum tree_code lcode,
2831 enum tree_code rcode, tree truth_type,
2832 tree ll_arg, tree lr_arg)
2833 {
2834 bool honor_nans = HONOR_NANS (ll_arg);
2835 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2836 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2837 int compcode;
2838
2839 switch (code)
2840 {
2841 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2842 compcode = lcompcode & rcompcode;
2843 break;
2844
2845 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2846 compcode = lcompcode | rcompcode;
2847 break;
2848
2849 default:
2850 return NULL_TREE;
2851 }
2852
2853 if (!honor_nans)
2854 {
2855 /* Eliminate unordered comparisons, as well as LTGT and ORD
2856 which are not used unless the mode has NaNs. */
2857 compcode &= ~COMPCODE_UNORD;
2858 if (compcode == COMPCODE_LTGT)
2859 compcode = COMPCODE_NE;
2860 else if (compcode == COMPCODE_ORD)
2861 compcode = COMPCODE_TRUE;
2862 }
2863 else if (flag_trapping_math)
2864 {
2865 /* Check that the original operation and the optimized ones will trap
2866 under the same condition. */
2867 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2868 && (lcompcode != COMPCODE_EQ)
2869 && (lcompcode != COMPCODE_ORD);
2870 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2871 && (rcompcode != COMPCODE_EQ)
2872 && (rcompcode != COMPCODE_ORD);
2873 bool trap = (compcode & COMPCODE_UNORD) == 0
2874 && (compcode != COMPCODE_EQ)
2875 && (compcode != COMPCODE_ORD);
2876
2877 /* In a short-circuited boolean expression the LHS might be
2878 such that the RHS, if evaluated, will never trap. For
2879 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2880 if neither x nor y is NaN. (This is a mixed blessing: for
2881 example, the expression above will never trap, hence
2882 optimizing it to x < y would be invalid). */
2883 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2884 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2885 rtrap = false;
2886
2887 /* If the comparison was short-circuited, and only the RHS
2888 trapped, we may now generate a spurious trap. */
2889 if (rtrap && !ltrap
2890 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2891 return NULL_TREE;
2892
2893 /* If we changed the conditions that cause a trap, we lose. */
2894 if ((ltrap || rtrap) != trap)
2895 return NULL_TREE;
2896 }
2897
2898 if (compcode == COMPCODE_TRUE)
2899 return constant_boolean_node (true, truth_type);
2900 else if (compcode == COMPCODE_FALSE)
2901 return constant_boolean_node (false, truth_type);
2902 else
2903 {
2904 enum tree_code tcode;
2905
2906 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2907 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2908 }
2909 }
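
/* Illustrative sketch, not part of the original file: for a pair of
   comparisons of the same integer operands X and Y,

     combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                          boolean_type_node, x, y)

   ORs COMPCODE_LT with COMPCODE_EQ to get COMPCODE_LE and returns the
   folded tree for x <= y.  Combinations whose trapping behaviour would
   change under -ftrapping-math come back as NULL_TREE instead.  */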
2910 \f
2911 /* Return nonzero if two operands (typically of the same tree node)
2912 are necessarily equal. FLAGS modifies behavior as follows:
2913
2914 If OEP_ONLY_CONST is set, only return nonzero for constants.
2915 This function tests whether the operands are indistinguishable;
2916 it does not test whether they are equal using C's == operation.
2917 The distinction is important for IEEE floating point, because
2918 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2919 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2920
2921 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2922 even though it may hold multiple values during a function.
2923 This is because a GCC tree node guarantees that nothing else is
2924 executed between the evaluation of its "operands" (which may often
2925 be evaluated in arbitrary order). Hence if the operands themselves
2926 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2927 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2928 unset means assuming isochronic (or instantaneous) tree equivalence.
2929 Unless comparing arbitrary expression trees, such as from different
2930 statements, this flag can usually be left unset.
2931
2932 If OEP_PURE_SAME is set, then pure functions with identical arguments
2933 are considered the same. It is used when the caller has other ways
2934 to ensure that global memory is unchanged in between.
2935
2936 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2937 not values of expressions.
2938
2939 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2940 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2941
2942 If OEP_BITWISE is set, then require the values to be bitwise identical
2943 rather than simply numerically equal. Do not take advantage of things
2944 like math-related flags or undefined behavior; only return true for
2945 values that are provably bitwise identical in all circumstances.
2946
2947 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2948 any operand with side effects. This is unnecessarily conservative in the
2949 case we know that arg0 and arg1 are in disjoint code paths (such as in
2950 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2951 addresses with TREE_CONSTANT flag set so we know that &var == &var
2952 even if var is volatile. */
2953
2954 bool
2955 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2956 unsigned int flags)
2957 {
2958 bool r;
2959 if (verify_hash_value (arg0, arg1, flags, &r))
2960 return r;
2961
2962 STRIP_ANY_LOCATION_WRAPPER (arg0);
2963 STRIP_ANY_LOCATION_WRAPPER (arg1);
2964
2965 /* If either is ERROR_MARK, they aren't equal. */
2966 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2967 || TREE_TYPE (arg0) == error_mark_node
2968 || TREE_TYPE (arg1) == error_mark_node)
2969 return false;
2970
2971 /* Similarly, if either does not have a type (like a template id),
2972 they aren't equal. */
2973 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2974 return false;
2975
2976 /* Bitwise identity makes no sense if the values have different layouts. */
2977 if ((flags & OEP_BITWISE)
2978 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2979 return false;
2980
2981 /* We cannot consider pointers to different address space equal. */
2982 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2983 && POINTER_TYPE_P (TREE_TYPE (arg1))
2984 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2985 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2986 return false;
2987
2988 /* Check equality of integer constants before bailing out due to
2989 precision differences. */
2990 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2991 {
2992 /* Address of INTEGER_CST is not defined; check that we did not forget
2993 to drop the OEP_ADDRESS_OF flags. */
2994 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2995 return tree_int_cst_equal (arg0, arg1);
2996 }
2997
2998 if (!(flags & OEP_ADDRESS_OF))
2999 {
3000 /* If both types don't have the same signedness, then we can't consider
3001 them equal. We must check this before the STRIP_NOPS calls
3002 because they may change the signedness of the arguments. As pointers
3003 strictly don't have a signedness, require either two pointers or
3004 two non-pointers as well. */
3005 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3006 || POINTER_TYPE_P (TREE_TYPE (arg0))
3007 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3008 return false;
3009
3010 /* If both types don't have the same precision, then it is not safe
3011 to strip NOPs. */
3012 if (element_precision (TREE_TYPE (arg0))
3013 != element_precision (TREE_TYPE (arg1)))
3014 return false;
3015
3016 STRIP_NOPS (arg0);
3017 STRIP_NOPS (arg1);
3018 }
3019 #if 0
3020 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3021 sanity check once the issue is solved. */
3022 else
3023 /* Addresses of conversions and SSA_NAMEs (and many other things)
3024 are not defined. Check that we did not forget to drop the
3025 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3026 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3027 && TREE_CODE (arg0) != SSA_NAME);
3028 #endif
3029
3030 /* In case both args are comparisons but with different comparison
3031 code, try to swap the comparison operands of one arg to produce
3032 a match and compare that variant. */
3033 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3034 && COMPARISON_CLASS_P (arg0)
3035 && COMPARISON_CLASS_P (arg1))
3036 {
3037 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3038
3039 if (TREE_CODE (arg0) == swap_code)
3040 return operand_equal_p (TREE_OPERAND (arg0, 0),
3041 TREE_OPERAND (arg1, 1), flags)
3042 && operand_equal_p (TREE_OPERAND (arg0, 1),
3043 TREE_OPERAND (arg1, 0), flags);
3044 }
3045
3046 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3047 {
3048 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3049 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3050 ;
3051 else if (flags & OEP_ADDRESS_OF)
3052 {
3053 /* If we are interested in comparing addresses ignore
3054 MEM_REF wrappings of the base that can appear just for
3055 TBAA reasons. */
3056 if (TREE_CODE (arg0) == MEM_REF
3057 && DECL_P (arg1)
3058 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3059 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3060 && integer_zerop (TREE_OPERAND (arg0, 1)))
3061 return true;
3062 else if (TREE_CODE (arg1) == MEM_REF
3063 && DECL_P (arg0)
3064 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3065 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3066 && integer_zerop (TREE_OPERAND (arg1, 1)))
3067 return true;
3068 return false;
3069 }
3070 else
3071 return false;
3072 }
3073
3074 /* When not checking addresses, this is needed for conversions and for
3075 COMPONENT_REF. Might as well play it safe and always test this. */
3076 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3077 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3078 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3079 && !(flags & OEP_ADDRESS_OF)))
3080 return false;
3081
3082 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3083 We don't care about side effects in that case because the SAVE_EXPR
3084 takes care of that for us. In all other cases, two expressions are
3085 equal if they have no side effects. If we have two identical
3086 expressions with side effects that should be treated the same due
3087 to the only side effects being identical SAVE_EXPR's, that will
3088 be detected in the recursive calls below.
3089 If we are taking an invariant address of two identical objects
3090 they are necessarily equal as well. */
3091 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3092 && (TREE_CODE (arg0) == SAVE_EXPR
3093 || (flags & OEP_MATCH_SIDE_EFFECTS)
3094 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3095 return true;
3096
3097 /* Next handle constant cases, those for which we can return 1 even
3098 if ONLY_CONST is set. */
3099 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3100 switch (TREE_CODE (arg0))
3101 {
3102 case INTEGER_CST:
3103 return tree_int_cst_equal (arg0, arg1);
3104
3105 case FIXED_CST:
3106 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3107 TREE_FIXED_CST (arg1));
3108
3109 case REAL_CST:
3110 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3111 return true;
3112
3113 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3114 {
3115 /* If we do not distinguish between signed and unsigned zero,
3116 consider them equal. */
3117 if (real_zerop (arg0) && real_zerop (arg1))
3118 return true;
3119 }
3120 return false;
3121
3122 case VECTOR_CST:
3123 {
3124 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3125 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3126 return false;
3127
3128 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3129 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3130 return false;
3131
3132 unsigned int count = vector_cst_encoded_nelts (arg0);
3133 for (unsigned int i = 0; i < count; ++i)
3134 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3135 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3136 return false;
3137 return true;
3138 }
3139
3140 case COMPLEX_CST:
3141 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3142 flags)
3143 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3144 flags));
3145
3146 case STRING_CST:
3147 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3148 && ! memcmp (TREE_STRING_POINTER (arg0),
3149 TREE_STRING_POINTER (arg1),
3150 TREE_STRING_LENGTH (arg0)));
3151
3152 case ADDR_EXPR:
3153 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3154 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3155 flags | OEP_ADDRESS_OF
3156 | OEP_MATCH_SIDE_EFFECTS);
3157 case CONSTRUCTOR:
3158 /* In GIMPLE empty constructors are allowed in initializers of
3159 aggregates. */
3160 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3161 default:
3162 break;
3163 }
3164
3165 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3166 two instances of undefined behavior will give identical results. */
3167 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3168 return false;
3169
3170 /* Define macros to test an operand from arg0 and arg1 for equality and a
3171 variant that allows null and views null as being different from any
3172 non-null value. In the latter case, if either is null, they both
3173 must be; otherwise, do the normal comparison. */
3174 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3175 TREE_OPERAND (arg1, N), flags)
3176
3177 #define OP_SAME_WITH_NULL(N) \
3178 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3179 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3180
3181 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3182 {
3183 case tcc_unary:
3184 /* Two conversions are equal only if signedness and modes match. */
3185 switch (TREE_CODE (arg0))
3186 {
3187 CASE_CONVERT:
3188 case FIX_TRUNC_EXPR:
3189 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3190 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3191 return false;
3192 break;
3193 default:
3194 break;
3195 }
3196
3197 return OP_SAME (0);
3198
3199
3200 case tcc_comparison:
3201 case tcc_binary:
3202 if (OP_SAME (0) && OP_SAME (1))
3203 return true;
3204
3205 /* For commutative ops, allow the other order. */
3206 return (commutative_tree_code (TREE_CODE (arg0))
3207 && operand_equal_p (TREE_OPERAND (arg0, 0),
3208 TREE_OPERAND (arg1, 1), flags)
3209 && operand_equal_p (TREE_OPERAND (arg0, 1),
3210 TREE_OPERAND (arg1, 0), flags));
3211
3212 case tcc_reference:
3213 /* If either of the pointer (or reference) expressions we are
3214 dereferencing contain a side effect, these cannot be equal,
3215 but their addresses can be. */
3216 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3217 && (TREE_SIDE_EFFECTS (arg0)
3218 || TREE_SIDE_EFFECTS (arg1)))
3219 return false;
3220
3221 switch (TREE_CODE (arg0))
3222 {
3223 case INDIRECT_REF:
3224 if (!(flags & OEP_ADDRESS_OF))
3225 {
3226 if (TYPE_ALIGN (TREE_TYPE (arg0))
3227 != TYPE_ALIGN (TREE_TYPE (arg1)))
3228 return false;
3229 /* Verify that the access types are compatible. */
3230 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3231 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3232 return false;
3233 }
3234 flags &= ~OEP_ADDRESS_OF;
3235 return OP_SAME (0);
3236
3237 case IMAGPART_EXPR:
3238 /* Require the same offset. */
3239 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3240 TYPE_SIZE (TREE_TYPE (arg1)),
3241 flags & ~OEP_ADDRESS_OF))
3242 return false;
3243
3244 /* Fallthru. */
3245 case REALPART_EXPR:
3246 case VIEW_CONVERT_EXPR:
3247 return OP_SAME (0);
3248
3249 case TARGET_MEM_REF:
3250 case MEM_REF:
3251 if (!(flags & OEP_ADDRESS_OF))
3252 {
3253 /* Require equal access sizes */
3254 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3255 && (!TYPE_SIZE (TREE_TYPE (arg0))
3256 || !TYPE_SIZE (TREE_TYPE (arg1))
3257 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3258 TYPE_SIZE (TREE_TYPE (arg1)),
3259 flags)))
3260 return false;
3261 /* Verify that access happens in similar types. */
3262 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3263 return false;
3264 /* Verify that accesses are TBAA compatible. */
3265 if (!alias_ptr_types_compatible_p
3266 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3267 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3268 || (MR_DEPENDENCE_CLIQUE (arg0)
3269 != MR_DEPENDENCE_CLIQUE (arg1))
3270 || (MR_DEPENDENCE_BASE (arg0)
3271 != MR_DEPENDENCE_BASE (arg1)))
3272 return false;
3273 /* Verify that alignment is compatible. */
3274 if (TYPE_ALIGN (TREE_TYPE (arg0))
3275 != TYPE_ALIGN (TREE_TYPE (arg1)))
3276 return false;
3277 }
3278 flags &= ~OEP_ADDRESS_OF;
3279 return (OP_SAME (0) && OP_SAME (1)
3280 /* TARGET_MEM_REFs require equal extra operands. */
3281 && (TREE_CODE (arg0) != TARGET_MEM_REF
3282 || (OP_SAME_WITH_NULL (2)
3283 && OP_SAME_WITH_NULL (3)
3284 && OP_SAME_WITH_NULL (4))));
3285
3286 case ARRAY_REF:
3287 case ARRAY_RANGE_REF:
3288 if (!OP_SAME (0))
3289 return false;
3290 flags &= ~OEP_ADDRESS_OF;
3291 /* Compare the array index by value first if it is constant, as we
3292 may have the same value with different types here. */
3293 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3294 TREE_OPERAND (arg1, 1))
3295 || OP_SAME (1))
3296 && OP_SAME_WITH_NULL (2)
3297 && OP_SAME_WITH_NULL (3)
3298 /* Compare low bound and element size as with OEP_ADDRESS_OF
3299 we have to account for the offset of the ref. */
3300 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3301 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3302 || (operand_equal_p (array_ref_low_bound
3303 (CONST_CAST_TREE (arg0)),
3304 array_ref_low_bound
3305 (CONST_CAST_TREE (arg1)), flags)
3306 && operand_equal_p (array_ref_element_size
3307 (CONST_CAST_TREE (arg0)),
3308 array_ref_element_size
3309 (CONST_CAST_TREE (arg1)),
3310 flags))));
3311
3312 case COMPONENT_REF:
3313 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3314 may be NULL when we're called to compare MEM_EXPRs. */
3315 if (!OP_SAME_WITH_NULL (0)
3316 || !OP_SAME (1))
3317 return false;
3318 flags &= ~OEP_ADDRESS_OF;
3319 return OP_SAME_WITH_NULL (2);
3320
3321 case BIT_FIELD_REF:
3322 if (!OP_SAME (0))
3323 return false;
3324 flags &= ~OEP_ADDRESS_OF;
3325 return OP_SAME (1) && OP_SAME (2);
3326
3327 /* Virtual table call. */
3328 case OBJ_TYPE_REF:
3329 {
3330 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3331 OBJ_TYPE_REF_EXPR (arg1), flags))
3332 return false;
3333 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3334 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3335 return false;
3336 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3337 OBJ_TYPE_REF_OBJECT (arg1), flags))
3338 return false;
3339 if (!types_same_for_odr (obj_type_ref_class (arg0),
3340 obj_type_ref_class (arg1)))
3341 return false;
3342 return true;
3343 }
3344
3345 default:
3346 return false;
3347 }
3348
3349 case tcc_expression:
3350 switch (TREE_CODE (arg0))
3351 {
3352 case ADDR_EXPR:
3353 /* Be sure we pass the right ADDRESS_OF flag. */
3354 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3355 return operand_equal_p (TREE_OPERAND (arg0, 0),
3356 TREE_OPERAND (arg1, 0),
3357 flags | OEP_ADDRESS_OF);
3358
3359 case TRUTH_NOT_EXPR:
3360 return OP_SAME (0);
3361
3362 case TRUTH_ANDIF_EXPR:
3363 case TRUTH_ORIF_EXPR:
3364 return OP_SAME (0) && OP_SAME (1);
3365
3366 case WIDEN_MULT_PLUS_EXPR:
3367 case WIDEN_MULT_MINUS_EXPR:
3368 if (!OP_SAME (2))
3369 return false;
3370 /* The multiplication operands are commutative. */
3371 /* FALLTHRU */
3372
3373 case TRUTH_AND_EXPR:
3374 case TRUTH_OR_EXPR:
3375 case TRUTH_XOR_EXPR:
3376 if (OP_SAME (0) && OP_SAME (1))
3377 return true;
3378
3379 /* Otherwise take into account that this is a commutative operation. */
3380 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3381 TREE_OPERAND (arg1, 1), flags)
3382 && operand_equal_p (TREE_OPERAND (arg0, 1),
3383 TREE_OPERAND (arg1, 0), flags));
3384
3385 case COND_EXPR:
3386 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3387 return false;
3388 flags &= ~OEP_ADDRESS_OF;
3389 return OP_SAME (0);
3390
3391 case BIT_INSERT_EXPR:
3392 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3393 of op1, so we need to check that it is the same for both. */
3394 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3395 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3396 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3397 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3398 return false;
3399 /* FALLTHRU */
3400
3401 case VEC_COND_EXPR:
3402 case DOT_PROD_EXPR:
3403 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3404
3405 case MODIFY_EXPR:
3406 case INIT_EXPR:
3407 case COMPOUND_EXPR:
3408 case PREDECREMENT_EXPR:
3409 case PREINCREMENT_EXPR:
3410 case POSTDECREMENT_EXPR:
3411 case POSTINCREMENT_EXPR:
3412 if (flags & OEP_LEXICOGRAPHIC)
3413 return OP_SAME (0) && OP_SAME (1);
3414 return false;
3415
3416 case CLEANUP_POINT_EXPR:
3417 case EXPR_STMT:
3418 case SAVE_EXPR:
3419 if (flags & OEP_LEXICOGRAPHIC)
3420 return OP_SAME (0);
3421 return false;
3422
3423 default:
3424 return false;
3425 }
3426
3427 case tcc_vl_exp:
3428 switch (TREE_CODE (arg0))
3429 {
3430 case CALL_EXPR:
3431 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3432 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3433 /* If the CALL_EXPRs are not both internal calls or both calls to
3434 normal functions, then they are not equal. */
3435 return false;
3436 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3437 {
3438 /* If the CALL_EXPRs call different internal functions, then they
3439 are not equal. */
3440 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3441 return false;
3442 }
3443 else
3444 {
3445 /* If the CALL_EXPRs call different functions, then they are not
3446 equal. */
3447 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3448 flags))
3449 return false;
3450 }
3451
3452 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3453 {
3454 unsigned int cef = call_expr_flags (arg0);
3455 if (flags & OEP_PURE_SAME)
3456 cef &= ECF_CONST | ECF_PURE;
3457 else
3458 cef &= ECF_CONST;
3459 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3460 return false;
3461 }
3462
3463 /* Now see if all the arguments are the same. */
3464 {
3465 const_call_expr_arg_iterator iter0, iter1;
3466 const_tree a0, a1;
3467 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3468 a1 = first_const_call_expr_arg (arg1, &iter1);
3469 a0 && a1;
3470 a0 = next_const_call_expr_arg (&iter0),
3471 a1 = next_const_call_expr_arg (&iter1))
3472 if (! operand_equal_p (a0, a1, flags))
3473 return false;
3474
3475 /* If we get here and both argument lists are exhausted
3476 then the CALL_EXPRs are equal. */
3477 return ! (a0 || a1);
3478 }
3479 default:
3480 return false;
3481 }
3482
3483 case tcc_declaration:
3484 /* Consider __builtin_sqrt equal to sqrt. */
3485 return (TREE_CODE (arg0) == FUNCTION_DECL
3486 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3487 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3488 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3489 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3490
3491 case tcc_exceptional:
3492 if (TREE_CODE (arg0) == CONSTRUCTOR)
3493 {
3494 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3495 return false;
3496
3497 /* In GIMPLE constructors are used only to build vectors from
3498 elements. Individual elements in the constructor must be
3499 indexed in increasing order and form an initial sequence.
3500
3501 We make no effort to compare constructors in generic.
3502 (see sem_variable::equals in ipa-icf which can do so for
3503 constants). */
3504 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3505 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3506 return false;
3507
3508 /* Be sure that vectors constructed have the same representation.
3509 We have only tested that element precision and modes match.
3510 Vectors may be BLKmode, so also check that the number of
3511 parts matches. */
3512 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3513 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3514 return false;
3515
3516 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3517 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3518 unsigned int len = vec_safe_length (v0);
3519
3520 if (len != vec_safe_length (v1))
3521 return false;
3522
3523 for (unsigned int i = 0; i < len; i++)
3524 {
3525 constructor_elt *c0 = &(*v0)[i];
3526 constructor_elt *c1 = &(*v1)[i];
3527
3528 if (!operand_equal_p (c0->value, c1->value, flags)
3529 /* In GIMPLE the indexes can be either NULL or matching i.
3530 Double check this so we won't get false
3531 positives for GENERIC. */
3532 || (c0->index
3533 && (TREE_CODE (c0->index) != INTEGER_CST
3534 || compare_tree_int (c0->index, i)))
3535 || (c1->index
3536 && (TREE_CODE (c1->index) != INTEGER_CST
3537 || compare_tree_int (c1->index, i))))
3538 return false;
3539 }
3540 return true;
3541 }
3542 else if (TREE_CODE (arg0) == STATEMENT_LIST
3543 && (flags & OEP_LEXICOGRAPHIC))
3544 {
3545 /* Compare the STATEMENT_LISTs. */
3546 tree_stmt_iterator tsi1, tsi2;
3547 tree body1 = CONST_CAST_TREE (arg0);
3548 tree body2 = CONST_CAST_TREE (arg1);
3549 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3550 tsi_next (&tsi1), tsi_next (&tsi2))
3551 {
3552 /* The lists don't have the same number of statements. */
3553 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3554 return false;
3555 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3556 return true;
3557 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3558 flags & (OEP_LEXICOGRAPHIC
3559 | OEP_NO_HASH_CHECK)))
3560 return false;
3561 }
3562 }
3563 return false;
3564
3565 case tcc_statement:
3566 switch (TREE_CODE (arg0))
3567 {
3568 case RETURN_EXPR:
3569 if (flags & OEP_LEXICOGRAPHIC)
3570 return OP_SAME_WITH_NULL (0);
3571 return false;
3572 case DEBUG_BEGIN_STMT:
3573 if (flags & OEP_LEXICOGRAPHIC)
3574 return true;
3575 return false;
3576 default:
3577 return false;
3578 }
3579
3580 default:
3581 return false;
3582 }
3583
3584 #undef OP_SAME
3585 #undef OP_SAME_WITH_NULL
3586 }
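
/* Illustrative sketch, not part of the original file, of the flags
   documented before operand_equal_p, for two existing GENERIC trees
   A and B:

     operand_equal_p (a, b, 0)                structural equality; any
                                              side effect makes it false
     operand_equal_p (a, b, OEP_ONLY_CONST)   nonzero only for matching
                                              constants
     operand_equal_p (a, b, OEP_ADDRESS_OF)   compare A and B as the
                                              addresses of objects  */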
3587
3588 /* Generate a hash value for an expression. This can be used iteratively
3589 by passing a previous result as the HSTATE argument. */
3590
3591 void
3592 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3593 unsigned int flags)
3594 {
3595 int i;
3596 enum tree_code code;
3597 enum tree_code_class tclass;
3598
3599 if (t == NULL_TREE || t == error_mark_node)
3600 {
3601 hstate.merge_hash (0);
3602 return;
3603 }
3604
3605 STRIP_ANY_LOCATION_WRAPPER (t);
3606
3607 if (!(flags & OEP_ADDRESS_OF))
3608 STRIP_NOPS (t);
3609
3610 code = TREE_CODE (t);
3611
3612 switch (code)
3613 {
3614 /* Alas, constants aren't shared, so we can't rely on pointer
3615 identity. */
3616 case VOID_CST:
3617 hstate.merge_hash (0);
3618 return;
3619 case INTEGER_CST:
3620 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3621 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3622 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3623 return;
3624 case REAL_CST:
3625 {
3626 unsigned int val2;
3627 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3628 val2 = rvc_zero;
3629 else
3630 val2 = real_hash (TREE_REAL_CST_PTR (t));
3631 hstate.merge_hash (val2);
3632 return;
3633 }
3634 case FIXED_CST:
3635 {
3636 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3637 hstate.merge_hash (val2);
3638 return;
3639 }
3640 case STRING_CST:
3641 hstate.add ((const void *) TREE_STRING_POINTER (t),
3642 TREE_STRING_LENGTH (t));
3643 return;
3644 case COMPLEX_CST:
3645 hash_operand (TREE_REALPART (t), hstate, flags);
3646 hash_operand (TREE_IMAGPART (t), hstate, flags);
3647 return;
3648 case VECTOR_CST:
3649 {
3650 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3651 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3652 unsigned int count = vector_cst_encoded_nelts (t);
3653 for (unsigned int i = 0; i < count; ++i)
3654 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3655 return;
3656 }
3657 case SSA_NAME:
3658 /* We can just compare by pointer. */
3659 hstate.add_hwi (SSA_NAME_VERSION (t));
3660 return;
3661 case PLACEHOLDER_EXPR:
3662 /* The node itself doesn't matter. */
3663 return;
3664 case BLOCK:
3665 case OMP_CLAUSE:
3666 /* Ignore. */
3667 return;
3668 case TREE_LIST:
3669 /* A list of expressions, for a CALL_EXPR or as the elements of a
3670 VECTOR_CST. */
3671 for (; t; t = TREE_CHAIN (t))
3672 hash_operand (TREE_VALUE (t), hstate, flags);
3673 return;
3674 case CONSTRUCTOR:
3675 {
3676 unsigned HOST_WIDE_INT idx;
3677 tree field, value;
3678 flags &= ~OEP_ADDRESS_OF;
3679 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3680 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3681 {
3682 /* In GIMPLE the indexes can be either NULL or matching i. */
3683 if (field == NULL_TREE)
3684 field = bitsize_int (idx);
3685 hash_operand (field, hstate, flags);
3686 hash_operand (value, hstate, flags);
3687 }
3688 return;
3689 }
3690 case STATEMENT_LIST:
3691 {
3692 tree_stmt_iterator i;
3693 for (i = tsi_start (CONST_CAST_TREE (t));
3694 !tsi_end_p (i); tsi_next (&i))
3695 hash_operand (tsi_stmt (i), hstate, flags);
3696 return;
3697 }
3698 case TREE_VEC:
3699 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3700 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3701 return;
3702 case IDENTIFIER_NODE:
3703 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3704 return;
3705 case FUNCTION_DECL:
3706 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3707 Otherwise nodes that compare equal according to operand_equal_p might
3708 get different hash codes. However, don't do this for machine specific
3709 or front end builtins, since the function code is overloaded in those
3710 cases. */
3711 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3712 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3713 {
3714 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3715 code = TREE_CODE (t);
3716 }
3717 /* FALL THROUGH */
3718 default:
3719 if (POLY_INT_CST_P (t))
3720 {
3721 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3722 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3723 return;
3724 }
3725 tclass = TREE_CODE_CLASS (code);
3726
3727 if (tclass == tcc_declaration)
3728 {
3729 /* DECLs have a unique ID.  */
3730 hstate.add_hwi (DECL_UID (t));
3731 }
3732 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3733 {
3734 /* For comparisons that can be swapped, use the lower
3735 tree code. */
3736 enum tree_code ccode = swap_tree_comparison (code);
3737 if (code < ccode)
3738 ccode = code;
3739 hstate.add_object (ccode);
3740 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3741 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3742 }
3743 else if (CONVERT_EXPR_CODE_P (code))
3744 {
3745 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3746 operand_equal_p. */
3747 enum tree_code ccode = NOP_EXPR;
3748 hstate.add_object (ccode);
3749
3750 /* Don't hash the type, that can lead to having nodes which
3751 compare equal according to operand_equal_p, but which
3752 have different hash codes. Make sure to include signedness
3753 in the hash computation. */
3754 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3755 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3756 }
3757 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3758 else if (code == MEM_REF
3759 && (flags & OEP_ADDRESS_OF) != 0
3760 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3761 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3762 && integer_zerop (TREE_OPERAND (t, 1)))
3763 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3764 hstate, flags);
3765 /* Don't ICE on FE specific trees, or their arguments etc.
3766 during operand_equal_p hash verification. */
3767 else if (!IS_EXPR_CODE_CLASS (tclass))
3768 gcc_assert (flags & OEP_HASH_CHECK);
3769 else
3770 {
3771 unsigned int sflags = flags;
3772
3773 hstate.add_object (code);
3774
3775 switch (code)
3776 {
3777 case ADDR_EXPR:
3778 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3779 flags |= OEP_ADDRESS_OF;
3780 sflags = flags;
3781 break;
3782
3783 case INDIRECT_REF:
3784 case MEM_REF:
3785 case TARGET_MEM_REF:
3786 flags &= ~OEP_ADDRESS_OF;
3787 sflags = flags;
3788 break;
3789
3790 case ARRAY_REF:
3791 case ARRAY_RANGE_REF:
3792 case COMPONENT_REF:
3793 case BIT_FIELD_REF:
3794 sflags &= ~OEP_ADDRESS_OF;
3795 break;
3796
3797 case COND_EXPR:
3798 flags &= ~OEP_ADDRESS_OF;
3799 break;
3800
3801 case WIDEN_MULT_PLUS_EXPR:
3802 case WIDEN_MULT_MINUS_EXPR:
3803 {
3804 /* The multiplication operands are commutative. */
3805 inchash::hash one, two;
3806 hash_operand (TREE_OPERAND (t, 0), one, flags);
3807 hash_operand (TREE_OPERAND (t, 1), two, flags);
3808 hstate.add_commutative (one, two);
3809 hash_operand (TREE_OPERAND (t, 2), two, flags);
3810 return;
3811 }
3812
3813 case CALL_EXPR:
3814 if (CALL_EXPR_FN (t) == NULL_TREE)
3815 hstate.add_int (CALL_EXPR_IFN (t));
3816 break;
3817
3818 case TARGET_EXPR:
3819 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3820 Usually different TARGET_EXPRs should just use
3821 different temporaries in their slots. */
3822 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3823 return;
3824
3825 /* Virtual table call. */
3826 case OBJ_TYPE_REF:
3827 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3828 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3829 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3830 return;
3831 default:
3832 break;
3833 }
3834
3835 /* Don't hash the type, that can lead to having nodes which
3836 compare equal according to operand_equal_p, but which
3837 have different hash codes. */
3838 if (code == NON_LVALUE_EXPR)
3839 {
3840 /* Make sure to include signedness in the hash computation. */
3841 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3842 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3843 }
3844
3845 else if (commutative_tree_code (code))
3846 {
3847 /* It's a commutative expression. We want to hash it the same
3848 however it appears. We do this by first hashing both operands
3849 and then rehashing based on the order of their independent
3850 hashes. */
3851 inchash::hash one, two;
3852 hash_operand (TREE_OPERAND (t, 0), one, flags);
3853 hash_operand (TREE_OPERAND (t, 1), two, flags);
3854 hstate.add_commutative (one, two);
3855 }
3856 else
3857 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3858 hash_operand (TREE_OPERAND (t, i), hstate,
3859 i == 0 ? flags : sflags);
3860 }
3861 return;
3862 }
3863 }
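
/* Illustrative sketch, not part of fold-const.c: the add_commutative steps
   above make the hash independent of operand order, so trees that
   operand_equal_p treats as equal (e.g. a + b and b + a) hash identically.
   A standalone model of that idea, using a hypothetical combine function
   rather than the real inchash API.  */

#include <assert.h>
#include <stdint.h>

static uint32_t
combine_commutative_sketch (uint32_t seed, uint32_t h1, uint32_t h2)
{
  /* Mix the smaller hash first so the result ignores operand order.  */
  uint32_t lo = h1 < h2 ? h1 : h2;
  uint32_t hi = h1 < h2 ? h2 : h1;
  seed = seed * 0x9e3779b9u + lo;
  return seed * 0x9e3779b9u + hi;
}

static void
commutative_hash_check (uint32_t a, uint32_t b)
{
  assert (combine_commutative_sketch (0, a, b)
          == combine_commutative_sketch (0, b, a));
}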
3864
3865 bool
3866 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3867 unsigned int flags, bool *ret)
3868 {
3869 /* When checking, verify at the outermost operand_equal_p call that
3870 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
3871 hash value. */
3872 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3873 {
3874 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3875 {
3876 if (arg0 != arg1)
3877 {
3878 inchash::hash hstate0 (0), hstate1 (0);
3879 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3880 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3881 hashval_t h0 = hstate0.end ();
3882 hashval_t h1 = hstate1.end ();
3883 gcc_assert (h0 == h1);
3884 }
3885 *ret = true;
3886 }
3887 else
3888 *ret = false;
3889
3890 return true;
3891 }
3892
3893 return false;
3894 }
3895
3896
3897 static operand_compare default_compare_instance;
3898
3899 /* Convenience wrapper around the operand_compare class, since usually we do
3900 not need to play with the valueizer. */
3901
3902 bool
3903 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3904 {
3905 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3906 }
3907
3908 namespace inchash
3909 {
3910
3911 /* Generate a hash value for an expression. This can be used iteratively
3912 by passing a previous result as the HSTATE argument.
3913
3914 This function is intended to produce the same hash for expressions which
3915 would compare equal using operand_equal_p. */
3916 void
3917 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3918 {
3919 default_compare_instance.hash_operand (t, hstate, flags);
3920 }
3921
3922 }
3923 \f
3924 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3925 with a different signedness or a narrower precision. */
3926
3927 static bool
3928 operand_equal_for_comparison_p (tree arg0, tree arg1)
3929 {
3930 if (operand_equal_p (arg0, arg1, 0))
3931 return true;
3932
3933 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3934 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3935 return false;
3936
3937 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3938 and see if the inner values are the same. This removes any
3939 signedness comparison, which doesn't matter here. */
3940 tree op0 = arg0;
3941 tree op1 = arg1;
3942 STRIP_NOPS (op0);
3943 STRIP_NOPS (op1);
3944 if (operand_equal_p (op0, op1, 0))
3945 return true;
3946
3947 /* Discard a single widening conversion from ARG1 and see if the inner
3948 value is the same as ARG0. */
3949 if (CONVERT_EXPR_P (arg1)
3950 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3951 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3952 < TYPE_PRECISION (TREE_TYPE (arg1))
3953 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3954 return true;
3955
3956 return false;
3957 }
3958 \f
3959 /* See if ARG is an expression that is either a comparison or is performing
3960 arithmetic on comparisons. The comparisons must only be comparing
3961 two different values, which will be stored in *CVAL1 and *CVAL2; if
3962 they are nonzero it means that some operands have already been found.
3963 No variables may be used anywhere else in the expression except in the
3964 comparisons.
3965
3966 If this is true, return true. Otherwise, return false. */
3967
3968 static bool
3969 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3970 {
3971 enum tree_code code = TREE_CODE (arg);
3972 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3973
3974 /* We can handle some of the tcc_expression cases here. */
3975 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3976 tclass = tcc_unary;
3977 else if (tclass == tcc_expression
3978 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3979 || code == COMPOUND_EXPR))
3980 tclass = tcc_binary;
3981
3982 switch (tclass)
3983 {
3984 case tcc_unary:
3985 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3986
3987 case tcc_binary:
3988 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3989 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3990
3991 case tcc_constant:
3992 return true;
3993
3994 case tcc_expression:
3995 if (code == COND_EXPR)
3996 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3997 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3998 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3999 return false;
4000
4001 case tcc_comparison:
4002 /* First see if we can handle the first operand, then the second. For
4003 the second operand, we know *CVAL1 can't be zero. It must be that
4004 one side of the comparison is each of the values; test for the
4005 case where this isn't true by failing if the two operands
4006 are the same. */
4007
4008 if (operand_equal_p (TREE_OPERAND (arg, 0),
4009 TREE_OPERAND (arg, 1), 0))
4010 return false;
4011
4012 if (*cval1 == 0)
4013 *cval1 = TREE_OPERAND (arg, 0);
4014 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4015 ;
4016 else if (*cval2 == 0)
4017 *cval2 = TREE_OPERAND (arg, 0);
4018 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4019 ;
4020 else
4021 return false;
4022
4023 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4024 ;
4025 else if (*cval2 == 0)
4026 *cval2 = TREE_OPERAND (arg, 1);
4027 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4028 ;
4029 else
4030 return false;
4031
4032 return true;
4033
4034 default:
4035 return false;
4036 }
4037 }
4038 \f
4039 /* ARG is a tree that is known to contain just arithmetic operations and
4040 comparisons. Evaluate the operations in the tree substituting NEW0 for
4041 any occurrence of OLD0 as an operand of a comparison and likewise for
4042 NEW1 and OLD1. */
4043
4044 static tree
4045 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4046 tree old1, tree new1)
4047 {
4048 tree type = TREE_TYPE (arg);
4049 enum tree_code code = TREE_CODE (arg);
4050 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4051
4052 /* We can handle some of the tcc_expression cases here. */
4053 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4054 tclass = tcc_unary;
4055 else if (tclass == tcc_expression
4056 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4057 tclass = tcc_binary;
4058
4059 switch (tclass)
4060 {
4061 case tcc_unary:
4062 return fold_build1_loc (loc, code, type,
4063 eval_subst (loc, TREE_OPERAND (arg, 0),
4064 old0, new0, old1, new1));
4065
4066 case tcc_binary:
4067 return fold_build2_loc (loc, code, type,
4068 eval_subst (loc, TREE_OPERAND (arg, 0),
4069 old0, new0, old1, new1),
4070 eval_subst (loc, TREE_OPERAND (arg, 1),
4071 old0, new0, old1, new1));
4072
4073 case tcc_expression:
4074 switch (code)
4075 {
4076 case SAVE_EXPR:
4077 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4078 old1, new1);
4079
4080 case COMPOUND_EXPR:
4081 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4082 old1, new1);
4083
4084 case COND_EXPR:
4085 return fold_build3_loc (loc, code, type,
4086 eval_subst (loc, TREE_OPERAND (arg, 0),
4087 old0, new0, old1, new1),
4088 eval_subst (loc, TREE_OPERAND (arg, 1),
4089 old0, new0, old1, new1),
4090 eval_subst (loc, TREE_OPERAND (arg, 2),
4091 old0, new0, old1, new1));
4092 default:
4093 break;
4094 }
4095 /* Fall through - ??? */
4096
4097 case tcc_comparison:
4098 {
4099 tree arg0 = TREE_OPERAND (arg, 0);
4100 tree arg1 = TREE_OPERAND (arg, 1);
4101
4102 /* We need to check both for exact equality and tree equality. The
4103 former will be true if the operand has a side-effect. In that
4104 case, we know the operand occurred exactly once. */
4105
4106 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4107 arg0 = new0;
4108 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4109 arg0 = new1;
4110
4111 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4112 arg1 = new0;
4113 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4114 arg1 = new1;
4115
4116 return fold_build2_loc (loc, code, type, arg0, arg1);
4117 }
4118
4119 default:
4120 return arg;
4121 }
4122 }
4123 \f
4124 /* Return a tree for the case when the result of an expression is RESULT
4125 converted to TYPE and OMITTED was previously an operand of the expression
4126 but is now not needed (e.g., we folded OMITTED * 0).
4127
4128 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4129 the conversion of RESULT to TYPE. */
4130
4131 tree
4132 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4133 {
4134 tree t = fold_convert_loc (loc, type, result);
4135
4136 /* If the resulting operand is an empty statement, just return the omitted
4137 statement cast to void. */
4138 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4139 return build1_loc (loc, NOP_EXPR, void_type_node,
4140 fold_ignored_result (omitted));
4141
4142 if (TREE_SIDE_EFFECTS (omitted))
4143 return build2_loc (loc, COMPOUND_EXPR, type,
4144 fold_ignored_result (omitted), t);
4145
4146 return non_lvalue_loc (loc, t);
4147 }
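
/* Illustrative sketch, not part of fold-const.c: at the source level,
   omitting an operand that has side effects corresponds to keeping it in a
   comma expression, so folding f () * 0 must still evaluate f () exactly
   once, i.e. behave like (f (), 0).  The names below are hypothetical.  */

#include <assert.h>

static int sketch_call_count;

static int
sketch_side_effecting_operand (void)
{
  sketch_call_count++;
  return 42;
}

static void
omit_one_operand_sketch (void)
{
  /* What the folder conceptually builds for OMITTED * 0.  */
  int folded = (sketch_side_effecting_operand (), 0);
  assert (folded == 0 && sketch_call_count == 1);
}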
4148
4149 /* Return a tree for the case when the result of an expression is RESULT
4150 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4151 of the expression but are now not needed.
4152
4153 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4154 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4155 evaluated before OMITTED2. Otherwise, if neither has side effects,
4156 just do the conversion of RESULT to TYPE. */
4157
4158 tree
4159 omit_two_operands_loc (location_t loc, tree type, tree result,
4160 tree omitted1, tree omitted2)
4161 {
4162 tree t = fold_convert_loc (loc, type, result);
4163
4164 if (TREE_SIDE_EFFECTS (omitted2))
4165 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4166 if (TREE_SIDE_EFFECTS (omitted1))
4167 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4168
4169 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4170 }
4171
4172 \f
4173 /* Return a simplified tree node for the truth-negation of ARG. This
4174 never alters ARG itself. We assume that ARG is an operation that
4175 returns a truth value (0 or 1).
4176
4177 FIXME: one would think we would fold the result, but it causes
4178 problems with the dominator optimizer. */
4179
4180 static tree
4181 fold_truth_not_expr (location_t loc, tree arg)
4182 {
4183 tree type = TREE_TYPE (arg);
4184 enum tree_code code = TREE_CODE (arg);
4185 location_t loc1, loc2;
4186
4187 /* If this is a comparison, we can simply invert it, except for
4188 floating-point non-equality comparisons, in which case we just
4189 enclose a TRUTH_NOT_EXPR around what we have. */
4190
4191 if (TREE_CODE_CLASS (code) == tcc_comparison)
4192 {
4193 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4194 if (FLOAT_TYPE_P (op_type)
4195 && flag_trapping_math
4196 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4197 && code != NE_EXPR && code != EQ_EXPR)
4198 return NULL_TREE;
4199
4200 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4201 if (code == ERROR_MARK)
4202 return NULL_TREE;
4203
4204 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4205 TREE_OPERAND (arg, 1));
4206 if (TREE_NO_WARNING (arg))
4207 TREE_NO_WARNING (ret) = 1;
4208 return ret;
4209 }
4210
4211 switch (code)
4212 {
4213 case INTEGER_CST:
4214 return constant_boolean_node (integer_zerop (arg), type);
4215
4216 case TRUTH_AND_EXPR:
4217 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4218 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4219 return build2_loc (loc, TRUTH_OR_EXPR, type,
4220 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4221 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4222
4223 case TRUTH_OR_EXPR:
4224 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4225 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4226 return build2_loc (loc, TRUTH_AND_EXPR, type,
4227 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4228 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4229
4230 case TRUTH_XOR_EXPR:
4231 /* Here we can invert either operand. We invert the first operand
4232 unless the second operand is a TRUTH_NOT_EXPR in which case our
4233 result is the XOR of the first operand with the inside of the
4234 negation of the second operand. */
4235
4236 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4237 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4238 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4239 else
4240 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4241 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4242 TREE_OPERAND (arg, 1));
4243
4244 case TRUTH_ANDIF_EXPR:
4245 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4246 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4247 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4248 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4249 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4250
4251 case TRUTH_ORIF_EXPR:
4252 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4253 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4254 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4255 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4256 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4257
4258 case TRUTH_NOT_EXPR:
4259 return TREE_OPERAND (arg, 0);
4260
4261 case COND_EXPR:
4262 {
4263 tree arg1 = TREE_OPERAND (arg, 1);
4264 tree arg2 = TREE_OPERAND (arg, 2);
4265
4266 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4267 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4268
4269 /* A COND_EXPR may have a throw as one operand, which
4270 then has void type. Just leave void operands
4271 as they are. */
4272 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4273 VOID_TYPE_P (TREE_TYPE (arg1))
4274 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4275 VOID_TYPE_P (TREE_TYPE (arg2))
4276 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4277 }
4278
4279 case COMPOUND_EXPR:
4280 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4281 return build2_loc (loc, COMPOUND_EXPR, type,
4282 TREE_OPERAND (arg, 0),
4283 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4284
4285 case NON_LVALUE_EXPR:
4286 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4287 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4288
4289 CASE_CONVERT:
4290 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4291 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4292
4293 /* fall through */
4294
4295 case FLOAT_EXPR:
4296 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4297 return build1_loc (loc, TREE_CODE (arg), type,
4298 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4299
4300 case BIT_AND_EXPR:
4301 if (!integer_onep (TREE_OPERAND (arg, 1)))
4302 return NULL_TREE;
4303 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4304
4305 case SAVE_EXPR:
4306 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4307
4308 case CLEANUP_POINT_EXPR:
4309 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4310 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4311 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4312
4313 default:
4314 return NULL_TREE;
4315 }
4316 }
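
/* Illustrative sketch, not part of fold-const.c: the transformations above
   are De Morgan's laws plus comparison inversion.  For integers !(a < b)
   is exactly (a >= b); for IEEE floating point that equivalence fails when
   a NaN is involved, which is why floating-point comparisons get the more
   careful treatment above.  */

#include <assert.h>

static void
truth_not_sketch (int a, int b)
{
  assert ((!(a && b)) == (!a || !b));   /* TRUTH_AND_EXPR -> TRUTH_OR_EXPR */
  assert ((!(a || b)) == (!a && !b));   /* TRUTH_OR_EXPR -> TRUTH_AND_EXPR */
  assert ((!(a < b)) == (a >= b));      /* integer comparison inversion */
}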
4317
4318 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4319 assume that ARG is an operation that returns a truth value (0 or 1
4320 for scalars, 0 or -1 for vectors). Return the folded expression if
4321 folding is successful. Otherwise, return NULL_TREE. */
4322
4323 static tree
4324 fold_invert_truthvalue (location_t loc, tree arg)
4325 {
4326 tree type = TREE_TYPE (arg);
4327 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4328 ? BIT_NOT_EXPR
4329 : TRUTH_NOT_EXPR,
4330 type, arg);
4331 }
4332
4333 /* Return a simplified tree node for the truth-negation of ARG. This
4334 never alters ARG itself. We assume that ARG is an operation that
4335 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4336
4337 tree
4338 invert_truthvalue_loc (location_t loc, tree arg)
4339 {
4340 if (TREE_CODE (arg) == ERROR_MARK)
4341 return arg;
4342
4343 tree type = TREE_TYPE (arg);
4344 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4345 ? BIT_NOT_EXPR
4346 : TRUTH_NOT_EXPR,
4347 type, arg);
4348 }
4349 \f
4350 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4351 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4352 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4353 is the original memory reference used to preserve the alias set of
4354 the access. */
4355
4356 static tree
4357 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4358 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4359 int unsignedp, int reversep)
4360 {
4361 tree result, bftype;
4362
4363 /* Attempt not to lose the access path if possible. */
4364 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4365 {
4366 tree ninner = TREE_OPERAND (orig_inner, 0);
4367 machine_mode nmode;
4368 poly_int64 nbitsize, nbitpos;
4369 tree noffset;
4370 int nunsignedp, nreversep, nvolatilep = 0;
4371 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4372 &noffset, &nmode, &nunsignedp,
4373 &nreversep, &nvolatilep);
4374 if (base == inner
4375 && noffset == NULL_TREE
4376 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4377 && !reversep
4378 && !nreversep
4379 && !nvolatilep)
4380 {
4381 inner = ninner;
4382 bitpos -= nbitpos;
4383 }
4384 }
4385
4386 alias_set_type iset = get_alias_set (orig_inner);
4387 if (iset == 0 && get_alias_set (inner) != iset)
4388 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4389 build_fold_addr_expr (inner),
4390 build_int_cst (ptr_type_node, 0));
4391
4392 if (known_eq (bitpos, 0) && !reversep)
4393 {
4394 tree size = TYPE_SIZE (TREE_TYPE (inner));
4395 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4396 || POINTER_TYPE_P (TREE_TYPE (inner)))
4397 && tree_fits_shwi_p (size)
4398 && tree_to_shwi (size) == bitsize)
4399 return fold_convert_loc (loc, type, inner);
4400 }
4401
4402 bftype = type;
4403 if (TYPE_PRECISION (bftype) != bitsize
4404 || TYPE_UNSIGNED (bftype) == !unsignedp)
4405 bftype = build_nonstandard_integer_type (bitsize, 0);
4406
4407 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4408 bitsize_int (bitsize), bitsize_int (bitpos));
4409 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4410
4411 if (bftype != type)
4412 result = fold_convert_loc (loc, type, result);
4413
4414 return result;
4415 }
4416
4417 /* Optimize a bit-field compare.
4418
4419 There are two cases: the first is a compare against a constant and the
4420 second is a comparison of two items where the fields are at the same
4421 bit position relative to the start of a chunk (byte, halfword, word)
4422 large enough to contain it. In these cases we can avoid the shift
4423 implicit in bitfield extractions.
4424
4425 For constants, we emit a compare of the shifted constant with the
4426 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4427 compared. For two fields at the same position, we do the ANDs with the
4428 similar mask and compare the result of the ANDs.
4429
4430 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4431 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4432 are the left and right operands of the comparison, respectively.
4433
4434 If the optimization described above can be done, we return the resulting
4435 tree. Otherwise we return zero. */
4436
4437 static tree
4438 optimize_bit_field_compare (location_t loc, enum tree_code code,
4439 tree compare_type, tree lhs, tree rhs)
4440 {
4441 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4442 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4443 tree type = TREE_TYPE (lhs);
4444 tree unsigned_type;
4445 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4446 machine_mode lmode, rmode;
4447 scalar_int_mode nmode;
4448 int lunsignedp, runsignedp;
4449 int lreversep, rreversep;
4450 int lvolatilep = 0, rvolatilep = 0;
4451 tree linner, rinner = NULL_TREE;
4452 tree mask;
4453 tree offset;
4454
4455 /* Get all the information about the extractions being done. If the bit size
4456 is the same as the size of the underlying object, we aren't doing an
4457 extraction at all and so can do nothing. We also don't want to
4458 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4459 then will no longer be able to replace it. */
4460 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4461 &lunsignedp, &lreversep, &lvolatilep);
4462 if (linner == lhs
4463 || !known_size_p (plbitsize)
4464 || !plbitsize.is_constant (&lbitsize)
4465 || !plbitpos.is_constant (&lbitpos)
4466 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4467 || offset != 0
4468 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4469 || lvolatilep)
4470 return 0;
4471
4472 if (const_p)
4473 rreversep = lreversep;
4474 else
4475 {
4476 /* If this is not a constant, we can only do something if bit positions,
4477 sizes, signedness and storage order are the same. */
4478 rinner
4479 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4480 &runsignedp, &rreversep, &rvolatilep);
4481
4482 if (rinner == rhs
4483 || maybe_ne (lbitpos, rbitpos)
4484 || maybe_ne (lbitsize, rbitsize)
4485 || lunsignedp != runsignedp
4486 || lreversep != rreversep
4487 || offset != 0
4488 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4489 || rvolatilep)
4490 return 0;
4491 }
4492
4493 /* Honor the C++ memory model and mimic what RTL expansion does. */
4494 poly_uint64 bitstart = 0;
4495 poly_uint64 bitend = 0;
4496 if (TREE_CODE (lhs) == COMPONENT_REF)
4497 {
4498 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4499 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4500 return 0;
4501 }
4502
4503 /* See if we can find a mode to refer to this field. We should be able to,
4504 but fail if we can't. */
4505 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4506 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4507 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4508 TYPE_ALIGN (TREE_TYPE (rinner))),
4509 BITS_PER_WORD, false, &nmode))
4510 return 0;
4511
4512 /* Set signed and unsigned types of the precision of this mode for the
4513 shifts below. */
4514 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4515
4516 /* Compute the bit position and size for the new reference and our offset
4517 within it. If the new reference is the same size as the original, we
4518 won't optimize anything, so return zero. */
4519 nbitsize = GET_MODE_BITSIZE (nmode);
4520 nbitpos = lbitpos & ~ (nbitsize - 1);
4521 lbitpos -= nbitpos;
4522 if (nbitsize == lbitsize)
4523 return 0;
4524
4525 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4526 lbitpos = nbitsize - lbitsize - lbitpos;
4527
4528 /* Make the mask to be used against the extracted field. */
4529 mask = build_int_cst_type (unsigned_type, -1);
4530 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4531 mask = const_binop (RSHIFT_EXPR, mask,
4532 size_int (nbitsize - lbitsize - lbitpos));
4533
4534 if (! const_p)
4535 {
4536 if (nbitpos < 0)
4537 return 0;
4538
4539 /* If not comparing with constant, just rework the comparison
4540 and return. */
4541 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4542 nbitsize, nbitpos, 1, lreversep);
4543 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4544 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4545 nbitsize, nbitpos, 1, rreversep);
4546 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4547 return fold_build2_loc (loc, code, compare_type, t1, t2);
4548 }
4549
4550 /* Otherwise, we are handling the constant case. See if the constant is too
4551 big for the field. Warn and return a tree for 0 (false) if so. We do
4552 this not only for its own sake, but to avoid having to test for this
4553 error case below. If we didn't, we might generate wrong code.
4554
4555 For unsigned fields, the constant shifted right by the field length should
4556 be all zero. For signed fields, the high-order bits should agree with
4557 the sign bit. */
4558
4559 if (lunsignedp)
4560 {
4561 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4562 {
4563 warning (0, "comparison is always %d due to width of bit-field",
4564 code == NE_EXPR);
4565 return constant_boolean_node (code == NE_EXPR, compare_type);
4566 }
4567 }
4568 else
4569 {
4570 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4571 if (tem != 0 && tem != -1)
4572 {
4573 warning (0, "comparison is always %d due to width of bit-field",
4574 code == NE_EXPR);
4575 return constant_boolean_node (code == NE_EXPR, compare_type);
4576 }
4577 }
4578
4579 if (nbitpos < 0)
4580 return 0;
4581
4582 /* Single-bit compares should always be against zero. */
4583 if (lbitsize == 1 && ! integer_zerop (rhs))
4584 {
4585 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4586 rhs = build_int_cst (type, 0);
4587 }
4588
4589 /* Make a new bitfield reference, shift the constant over the
4590 appropriate number of bits and mask it with the computed mask
4591 (in case this was a signed field). If we changed it, make a new one. */
4592 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4593 nbitsize, nbitpos, 1, lreversep);
4594
4595 rhs = const_binop (BIT_AND_EXPR,
4596 const_binop (LSHIFT_EXPR,
4597 fold_convert_loc (loc, unsigned_type, rhs),
4598 size_int (lbitpos)),
4599 mask);
4600
4601 lhs = build2_loc (loc, code, compare_type,
4602 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4603 return lhs;
4604 }
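
/* Illustrative sketch, not part of fold-const.c: in the constant case the
   field is never shifted out of its containing word; instead the word is
   masked and compared against the constant shifted into place.  A
   standalone model over a plain unsigned word, with a hypothetical field
   of WIDTH bits at bit position POS:  */

#include <assert.h>

static void
bit_field_compare_sketch (unsigned int word)
{
  const unsigned int pos = 3, width = 4;
  const unsigned int field_mask = ((1u << width) - 1) << pos;
  unsigned int field = (word >> pos) & ((1u << width) - 1);
  unsigned int rhs = 9;         /* constant known to fit in the field */

  /* field == rhs  <==>  (word & field_mask) == (rhs << pos)  */
  assert ((field == rhs) == ((word & field_mask) == (rhs << pos)));
}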
4605 \f
4606 /* Subroutine for fold_truth_andor_1: decode a field reference.
4607
4608 If EXP is a comparison reference, we return the innermost reference.
4609
4610 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4611 set to the starting bit number.
4612
4613 If the innermost field can be completely contained in a mode-sized
4614 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4615
4616 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4617 otherwise it is not changed.
4618
4619 *PUNSIGNEDP is set to the signedness of the field.
4620
4621 *PREVERSEP is set to the storage order of the field.
4622
4623 *PMASK is set to the mask used. This is either contained in a
4624 BIT_AND_EXPR or derived from the width of the field.
4625
4626 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4627
4628 Return 0 if this is not a component reference or is one that we can't
4629 do anything with. */
4630
4631 static tree
4632 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4633 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4634 int *punsignedp, int *preversep, int *pvolatilep,
4635 tree *pmask, tree *pand_mask)
4636 {
4637 tree exp = *exp_;
4638 tree outer_type = 0;
4639 tree and_mask = 0;
4640 tree mask, inner, offset;
4641 tree unsigned_type;
4642 unsigned int precision;
4643
4644 /* All the optimizations using this function assume integer fields.
4645 There are problems with FP fields since the type_for_size call
4646 below can fail for, e.g., XFmode. */
4647 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4648 return NULL_TREE;
4649
4650 /* We are interested in the bare arrangement of bits, so strip everything
4651 that doesn't affect the machine mode. However, record the type of the
4652 outermost expression if it may matter below. */
4653 if (CONVERT_EXPR_P (exp)
4654 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4655 outer_type = TREE_TYPE (exp);
4656 STRIP_NOPS (exp);
4657
4658 if (TREE_CODE (exp) == BIT_AND_EXPR)
4659 {
4660 and_mask = TREE_OPERAND (exp, 1);
4661 exp = TREE_OPERAND (exp, 0);
4662 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4663 if (TREE_CODE (and_mask) != INTEGER_CST)
4664 return NULL_TREE;
4665 }
4666
4667 poly_int64 poly_bitsize, poly_bitpos;
4668 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4669 pmode, punsignedp, preversep, pvolatilep);
4670 if ((inner == exp && and_mask == 0)
4671 || !poly_bitsize.is_constant (pbitsize)
4672 || !poly_bitpos.is_constant (pbitpos)
4673 || *pbitsize < 0
4674 || offset != 0
4675 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4676 /* Reject out-of-bound accesses (PR79731). */
4677 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4678 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4679 *pbitpos + *pbitsize) < 0))
4680 return NULL_TREE;
4681
4682 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4683 if (unsigned_type == NULL_TREE)
4684 return NULL_TREE;
4685
4686 *exp_ = exp;
4687
4688 /* If the number of bits in the reference is the same as the bitsize of
4689 the outer type, then the outer type gives the signedness. Otherwise
4690 (in case of a small bitfield) the signedness is unchanged. */
4691 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4692 *punsignedp = TYPE_UNSIGNED (outer_type);
4693
4694 /* Compute the mask to access the bitfield. */
4695 precision = TYPE_PRECISION (unsigned_type);
4696
4697 mask = build_int_cst_type (unsigned_type, -1);
4698
4699 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4700 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4701
4702 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4703 if (and_mask != 0)
4704 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4705 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4706
4707 *pmask = mask;
4708 *pand_mask = and_mask;
4709 return inner;
4710 }
4711
4712 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4713 bit positions and MASK is SIGNED. */
4714
4715 static bool
4716 all_ones_mask_p (const_tree mask, unsigned int size)
4717 {
4718 tree type = TREE_TYPE (mask);
4719 unsigned int precision = TYPE_PRECISION (type);
4720
4721 /* If this function returns true when the type of the mask is
4722 UNSIGNED, then there will be errors. In particular see
4723 gcc.c-torture/execute/990326-1.c. There does not appear to be
4724 any documentation paper trail as to why this is so. But the pre
4725 wide-int code worked with that restriction and it has been preserved
4726 here. */
4727 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4728 return false;
4729
4730 return wi::mask (size, false, precision) == wi::to_wide (mask);
4731 }
4732
4733 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4734 represents the sign bit of EXP's type. If EXP represents a sign
4735 or zero extension, also test VAL against the unextended type.
4736 The return value is the (sub)expression whose sign bit is VAL,
4737 or NULL_TREE otherwise. */
4738
4739 tree
4740 sign_bit_p (tree exp, const_tree val)
4741 {
4742 int width;
4743 tree t;
4744
4745 /* Tree EXP must have an integral type. */
4746 t = TREE_TYPE (exp);
4747 if (! INTEGRAL_TYPE_P (t))
4748 return NULL_TREE;
4749
4750 /* Tree VAL must be an integer constant. */
4751 if (TREE_CODE (val) != INTEGER_CST
4752 || TREE_OVERFLOW (val))
4753 return NULL_TREE;
4754
4755 width = TYPE_PRECISION (t);
4756 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4757 return exp;
4758
4759 /* Handle extension from a narrower type. */
4760 if (TREE_CODE (exp) == NOP_EXPR
4761 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4762 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4763
4764 return NULL_TREE;
4765 }
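
/* Illustrative sketch, not part of fold-const.c: for a 32-bit signed type
   the constant recognized here is 0x80000000, the value whose only set bit
   is the sign bit, and testing that bit is testing the sign.  */

#include <assert.h>
#include <stdint.h>

static void
sign_bit_sketch (int32_t x)
{
  const uint32_t sign_bit = UINT32_C (0x80000000);
  assert ((((uint32_t) x & sign_bit) != 0) == (x < 0));
}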
4766
4767 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4768 to be evaluated unconditionally. */
4769
4770 static bool
4771 simple_operand_p (const_tree exp)
4772 {
4773 /* Strip any conversions that don't change the machine mode. */
4774 STRIP_NOPS (exp);
4775
4776 return (CONSTANT_CLASS_P (exp)
4777 || TREE_CODE (exp) == SSA_NAME
4778 || (DECL_P (exp)
4779 && ! TREE_ADDRESSABLE (exp)
4780 && ! TREE_THIS_VOLATILE (exp)
4781 && ! DECL_NONLOCAL (exp)
4782 /* Don't regard global variables as simple. They may be
4783 allocated in ways unknown to the compiler (shared memory,
4784 #pragma weak, etc). */
4785 && ! TREE_PUBLIC (exp)
4786 && ! DECL_EXTERNAL (exp)
4787 /* Weakrefs are not safe to be read, since they can be NULL.
4788 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4789 have DECL_WEAK flag set. */
4790 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4791 /* Loading a static variable is unduly expensive, but global
4792 registers aren't expensive. */
4793 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4794 }
4795
4796 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4797 to be evaluated unconditionally.
4798 In addition to simple_operand_p, we assume that comparisons, conversions,
4799 and logic-not operations are simple, if their operands are simple, too. */
4800
4801 static bool
4802 simple_operand_p_2 (tree exp)
4803 {
4804 enum tree_code code;
4805
4806 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4807 return false;
4808
4809 while (CONVERT_EXPR_P (exp))
4810 exp = TREE_OPERAND (exp, 0);
4811
4812 code = TREE_CODE (exp);
4813
4814 if (TREE_CODE_CLASS (code) == tcc_comparison)
4815 return (simple_operand_p (TREE_OPERAND (exp, 0))
4816 && simple_operand_p (TREE_OPERAND (exp, 1)));
4817
4818 if (code == TRUTH_NOT_EXPR)
4819 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4820
4821 return simple_operand_p (exp);
4822 }
4823
4824 \f
4825 /* The following functions are subroutines to fold_range_test and allow it to
4826 try to change a logical combination of comparisons into a range test.
4827
4828 For example, both
4829 X == 2 || X == 3 || X == 4 || X == 5
4830 and
4831 X >= 2 && X <= 5
4832 are converted to
4833 (unsigned) (X - 2) <= 3
4834
4835 We describe each set of comparisons as being either inside or outside
4836 a range, using a variable named like IN_P, and then describe the
4837 range with a lower and upper bound. If one of the bounds is omitted,
4838 it represents either the highest or lowest value of the type.
4839
4840 In the comments below, we represent a range by two numbers in brackets
4841 preceded by a "+" to designate being inside that range, or a "-" to
4842 designate being outside that range, so the condition can be inverted by
4843 flipping the prefix. An omitted bound is represented by a "-". For
4844 example, "- [-, 10]" means being outside the range starting at the lowest
4845 possible value and ending at 10, in other words, being greater than 10.
4846 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4847 always false.
4848
4849 We set up things so that the missing bounds are handled in a consistent
4850 manner so neither a missing bound nor "true" and "false" need to be
4851 handled using a special case. */
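
/* Illustrative sketch, not part of fold-const.c: the two forms quoted in
   the comment above really do collapse to a single unsigned range test.  */

#include <assert.h>

static void
range_test_sketch (void)
{
  for (unsigned int x = 0; x < 256; x++)
    {
      int by_equality = (x == 2 || x == 3 || x == 4 || x == 5);
      int by_bounds = (x >= 2 && x <= 5);
      int by_range_test = ((x - 2) <= 3);   /* wraps below 2, so > 3 */
      assert (by_equality == by_bounds && by_bounds == by_range_test);
    }
}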
4852
4853 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4854 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4855 and UPPER1_P are nonzero if the respective argument is an upper bound
4856 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4857 must be specified for a comparison. ARG1 will be converted to ARG0's
4858 type if both are specified. */
4859
4860 static tree
4861 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4862 tree arg1, int upper1_p)
4863 {
4864 tree tem;
4865 int result;
4866 int sgn0, sgn1;
4867
4868 /* If neither arg represents infinity, do the normal operation.
4869 Else, if not a comparison, return infinity. Else handle the special
4870 comparison rules. Note that most of the cases below won't occur, but
4871 are handled for consistency. */
4872
4873 if (arg0 != 0 && arg1 != 0)
4874 {
4875 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4876 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4877 STRIP_NOPS (tem);
4878 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4879 }
4880
4881 if (TREE_CODE_CLASS (code) != tcc_comparison)
4882 return 0;
4883
4884 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4885 for neither. In real maths, we cannot assume open ended ranges are
4886 the same. But, this is computer arithmetic, where numbers are finite.
4887 We can therefore make the transformation of any unbounded range with
4888 the value Z, Z being greater than any representable number. This permits
4889 us to treat unbounded ranges as equal. */
4890 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4891 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4892 switch (code)
4893 {
4894 case EQ_EXPR:
4895 result = sgn0 == sgn1;
4896 break;
4897 case NE_EXPR:
4898 result = sgn0 != sgn1;
4899 break;
4900 case LT_EXPR:
4901 result = sgn0 < sgn1;
4902 break;
4903 case LE_EXPR:
4904 result = sgn0 <= sgn1;
4905 break;
4906 case GT_EXPR:
4907 result = sgn0 > sgn1;
4908 break;
4909 case GE_EXPR:
4910 result = sgn0 >= sgn1;
4911 break;
4912 default:
4913 gcc_unreachable ();
4914 }
4915
4916 return constant_boolean_node (result, type);
4917 }
4918 \f
4919 /* Helper routine for make_range. Perform one step for it, return
4920 new expression if the loop should continue or NULL_TREE if it should
4921 stop. */
4922
4923 tree
4924 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4925 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4926 bool *strict_overflow_p)
4927 {
4928 tree arg0_type = TREE_TYPE (arg0);
4929 tree n_low, n_high, low = *p_low, high = *p_high;
4930 int in_p = *p_in_p, n_in_p;
4931
4932 switch (code)
4933 {
4934 case TRUTH_NOT_EXPR:
4935 /* We can only do something if the range is testing for zero. */
4936 if (low == NULL_TREE || high == NULL_TREE
4937 || ! integer_zerop (low) || ! integer_zerop (high))
4938 return NULL_TREE;
4939 *p_in_p = ! in_p;
4940 return arg0;
4941
4942 case EQ_EXPR: case NE_EXPR:
4943 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4944 /* We can only do something if the range is testing for zero
4945 and if the second operand is an integer constant. Note that
4946 saying something is "in" the range we make is done by
4947 complementing IN_P since it will set in the initial case of
4948 being not equal to zero; "out" is leaving it alone. */
4949 if (low == NULL_TREE || high == NULL_TREE
4950 || ! integer_zerop (low) || ! integer_zerop (high)
4951 || TREE_CODE (arg1) != INTEGER_CST)
4952 return NULL_TREE;
4953
4954 switch (code)
4955 {
4956 case NE_EXPR: /* - [c, c] */
4957 low = high = arg1;
4958 break;
4959 case EQ_EXPR: /* + [c, c] */
4960 in_p = ! in_p, low = high = arg1;
4961 break;
4962 case GT_EXPR: /* - [-, c] */
4963 low = 0, high = arg1;
4964 break;
4965 case GE_EXPR: /* + [c, -] */
4966 in_p = ! in_p, low = arg1, high = 0;
4967 break;
4968 case LT_EXPR: /* - [c, -] */
4969 low = arg1, high = 0;
4970 break;
4971 case LE_EXPR: /* + [-, c] */
4972 in_p = ! in_p, low = 0, high = arg1;
4973 break;
4974 default:
4975 gcc_unreachable ();
4976 }
4977
4978 /* If this is an unsigned comparison, we also know that EXP is
4979 greater than or equal to zero. We base the range tests we make
4980 on that fact, so we record it here so we can parse existing
4981 range tests. We test arg0_type since often the return type
4982 of, e.g. EQ_EXPR, is boolean. */
4983 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4984 {
4985 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4986 in_p, low, high, 1,
4987 build_int_cst (arg0_type, 0),
4988 NULL_TREE))
4989 return NULL_TREE;
4990
4991 in_p = n_in_p, low = n_low, high = n_high;
4992
4993 /* If the high bound is missing, but we have a nonzero low
4994 bound, reverse the range so it goes from zero to the low bound
4995 minus 1. */
4996 if (high == 0 && low && ! integer_zerop (low))
4997 {
4998 in_p = ! in_p;
4999 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5000 build_int_cst (TREE_TYPE (low), 1), 0);
5001 low = build_int_cst (arg0_type, 0);
5002 }
5003 }
5004
5005 *p_low = low;
5006 *p_high = high;
5007 *p_in_p = in_p;
5008 return arg0;
5009
5010 case NEGATE_EXPR:
5011 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5012 low and high are non-NULL, then normalize will DTRT. */
5013 if (!TYPE_UNSIGNED (arg0_type)
5014 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5015 {
5016 if (low == NULL_TREE)
5017 low = TYPE_MIN_VALUE (arg0_type);
5018 if (high == NULL_TREE)
5019 high = TYPE_MAX_VALUE (arg0_type);
5020 }
5021
5022 /* (-x) IN [a,b] -> x in [-b, -a] */
5023 n_low = range_binop (MINUS_EXPR, exp_type,
5024 build_int_cst (exp_type, 0),
5025 0, high, 1);
5026 n_high = range_binop (MINUS_EXPR, exp_type,
5027 build_int_cst (exp_type, 0),
5028 0, low, 0);
5029 if (n_high != 0 && TREE_OVERFLOW (n_high))
5030 return NULL_TREE;
5031 goto normalize;
5032
5033 case BIT_NOT_EXPR:
5034 /* ~ X -> -X - 1 */
5035 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5036 build_int_cst (exp_type, 1));
5037
5038 case PLUS_EXPR:
5039 case MINUS_EXPR:
5040 if (TREE_CODE (arg1) != INTEGER_CST)
5041 return NULL_TREE;
5042
5043 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5044 move a constant to the other side. */
5045 if (!TYPE_UNSIGNED (arg0_type)
5046 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5047 return NULL_TREE;
5048
5049 /* If EXP is signed, any overflow in the computation is undefined,
5050 so we don't worry about it so long as our computations on
5051 the bounds don't overflow. For unsigned, overflow is defined
5052 and this is exactly the right thing. */
5053 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5054 arg0_type, low, 0, arg1, 0);
5055 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5056 arg0_type, high, 1, arg1, 0);
5057 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5058 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5059 return NULL_TREE;
5060
5061 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5062 *strict_overflow_p = true;
5063
5064 normalize:
5065 /* Check for an unsigned range which has wrapped around the maximum
5066 value thus making n_high < n_low, and normalize it. */
5067 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5068 {
5069 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5070 build_int_cst (TREE_TYPE (n_high), 1), 0);
5071 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5072 build_int_cst (TREE_TYPE (n_low), 1), 0);
5073
5074 /* If the range is of the form +/- [ x+1, x ], we won't
5075 be able to normalize it. But then, it represents the
5076 whole range or the empty set, so make it
5077 +/- [ -, - ]. */
5078 if (tree_int_cst_equal (n_low, low)
5079 && tree_int_cst_equal (n_high, high))
5080 low = high = 0;
5081 else
5082 in_p = ! in_p;
5083 }
5084 else
5085 low = n_low, high = n_high;
5086
5087 *p_low = low;
5088 *p_high = high;
5089 *p_in_p = in_p;
5090 return arg0;
5091
5092 CASE_CONVERT:
5093 case NON_LVALUE_EXPR:
5094 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5095 return NULL_TREE;
5096
5097 if (! INTEGRAL_TYPE_P (arg0_type)
5098 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5099 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5100 return NULL_TREE;
5101
5102 n_low = low, n_high = high;
5103
5104 if (n_low != 0)
5105 n_low = fold_convert_loc (loc, arg0_type, n_low);
5106
5107 if (n_high != 0)
5108 n_high = fold_convert_loc (loc, arg0_type, n_high);
5109
5110 /* If we're converting arg0 from an unsigned type, to exp,
5111 a signed type, we will be doing the comparison as unsigned.
5112 The tests above have already verified that LOW and HIGH
5113 are both positive.
5114
5115 So we have to ensure that we will handle large unsigned
5116 values the same way that the current signed bounds treat
5117 negative values. */
5118
5119 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5120 {
5121 tree high_positive;
5122 tree equiv_type;
5123 /* For fixed-point modes, we need to pass the saturating flag
5124 as the 2nd parameter. */
5125 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5126 equiv_type
5127 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5128 TYPE_SATURATING (arg0_type));
5129 else
5130 equiv_type
5131 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5132
5133 /* A range without an upper bound is, naturally, unbounded.
5134 Since convert would have cropped a very large value, use
5135 the max value for the destination type. */
5136 high_positive
5137 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5138 : TYPE_MAX_VALUE (arg0_type);
5139
5140 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5141 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5142 fold_convert_loc (loc, arg0_type,
5143 high_positive),
5144 build_int_cst (arg0_type, 1));
5145
5146 /* If the low bound is specified, "and" the range with the
5147 range for which the original unsigned value will be
5148 positive. */
5149 if (low != 0)
5150 {
5151 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5152 1, fold_convert_loc (loc, arg0_type,
5153 integer_zero_node),
5154 high_positive))
5155 return NULL_TREE;
5156
5157 in_p = (n_in_p == in_p);
5158 }
5159 else
5160 {
5161 /* Otherwise, "or" the range with the range of the input
5162 that will be interpreted as negative. */
5163 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5164 1, fold_convert_loc (loc, arg0_type,
5165 integer_zero_node),
5166 high_positive))
5167 return NULL_TREE;
5168
5169 in_p = (in_p != n_in_p);
5170 }
5171 }
5172
5173 *p_low = n_low;
5174 *p_high = n_high;
5175 *p_in_p = in_p;
5176 return arg0;
5177
5178 default:
5179 return NULL_TREE;
5180 }
5181 }
5182
5183 /* Given EXP, a logical expression, set the range it is testing into
5184 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5185 actually being tested. *PLOW and *PHIGH will be made of the same
5186 type as the returned expression. If EXP is not a comparison, we
5187 will most likely not be returning a useful value and range. Set
5188 *STRICT_OVERFLOW_P to true if the return value is only valid
5189 because signed overflow is undefined; otherwise, do not change
5190 *STRICT_OVERFLOW_P. */
5191
5192 tree
5193 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5194 bool *strict_overflow_p)
5195 {
5196 enum tree_code code;
5197 tree arg0, arg1 = NULL_TREE;
5198 tree exp_type, nexp;
5199 int in_p;
5200 tree low, high;
5201 location_t loc = EXPR_LOCATION (exp);
5202
5203 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5204 and see if we can refine the range. Some of the cases below may not
5205 happen, but it doesn't seem worth worrying about this. We "continue"
5206 the outer loop when we've changed something; otherwise we "break"
5207 the switch, which will "break" the while. */
5208
5209 in_p = 0;
5210 low = high = build_int_cst (TREE_TYPE (exp), 0);
5211
5212 while (1)
5213 {
5214 code = TREE_CODE (exp);
5215 exp_type = TREE_TYPE (exp);
5216 arg0 = NULL_TREE;
5217
5218 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5219 {
5220 if (TREE_OPERAND_LENGTH (exp) > 0)
5221 arg0 = TREE_OPERAND (exp, 0);
5222 if (TREE_CODE_CLASS (code) == tcc_binary
5223 || TREE_CODE_CLASS (code) == tcc_comparison
5224 || (TREE_CODE_CLASS (code) == tcc_expression
5225 && TREE_OPERAND_LENGTH (exp) > 1))
5226 arg1 = TREE_OPERAND (exp, 1);
5227 }
5228 if (arg0 == NULL_TREE)
5229 break;
5230
5231 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5232 &high, &in_p, strict_overflow_p);
5233 if (nexp == NULL_TREE)
5234 break;
5235 exp = nexp;
5236 }
5237
5238 /* If EXP is a constant, we can evaluate whether this is true or false. */
5239 if (TREE_CODE (exp) == INTEGER_CST)
5240 {
5241 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5242 exp, 0, low, 0))
5243 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5244 exp, 1, high, 1)));
5245 low = high = 0;
5246 exp = 0;
5247 }
5248
5249 *pin_p = in_p, *plow = low, *phigh = high;
5250 return exp;
5251 }
5252
5253 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5254 a bitwise check i.e. when
5255 LOW == 0xXX...X00...0
5256 HIGH == 0xXX...X11...1
5257 Return corresponding mask in MASK and stem in VALUE. */
5258
5259 static bool
5260 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5261 tree *value)
5262 {
5263 if (TREE_CODE (low) != INTEGER_CST
5264 || TREE_CODE (high) != INTEGER_CST)
5265 return false;
5266
5267 unsigned prec = TYPE_PRECISION (type);
5268 wide_int lo = wi::to_wide (low, prec);
5269 wide_int hi = wi::to_wide (high, prec);
5270
5271 wide_int end_mask = lo ^ hi;
5272 if ((end_mask & (end_mask + 1)) != 0
5273 || (lo & end_mask) != 0)
5274 return false;
5275
5276 wide_int stem_mask = ~end_mask;
5277 wide_int stem = lo & stem_mask;
5278 if (stem != (hi & stem_mask))
5279 return false;
5280
5281 *mask = wide_int_to_tree (type, stem_mask);
5282 *value = wide_int_to_tree (type, stem);
5283
5284 return true;
5285 }
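
/* Illustrative sketch, not part of fold-const.c: bounds that differ only
   in a block of trailing bits give a single mask test.  For example
   LOW = 0x58 and HIGH = 0x5f yield end_mask = 0x07 and stem = 0x58, so
   LOW <= x && x <= HIGH is equivalent to (x & ~0x07) == 0x58.  */

#include <assert.h>

static void
maskable_range_sketch (void)
{
  const unsigned int low = 0x58, high = 0x5f, stem_mask = ~0x07u;
  for (unsigned int x = 0; x < 256; x++)
    assert ((x >= low && x <= high) == ((x & stem_mask) == low));
}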
5286 \f
5287 /* Helper routine for build_range_check and match.pd. Return the type to
5288 perform the check or NULL if it shouldn't be optimized. */
5289
5290 tree
5291 range_check_type (tree etype)
5292 {
5293 /* First make sure that arithmetic in this type is valid, then make sure
5294 that it wraps around. */
5295 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5296 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5297
5298 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5299 {
5300 tree utype, minv, maxv;
5301
5302 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5303 for the type in question, as we rely on this here. */
5304 utype = unsigned_type_for (etype);
5305 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5306 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5307 build_int_cst (TREE_TYPE (maxv), 1), 1);
5308 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5309
5310 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5311 minv, 1, maxv, 1)))
5312 etype = utype;
5313 else
5314 return NULL_TREE;
5315 }
5316 else if (POINTER_TYPE_P (etype))
5317 etype = unsigned_type_for (etype);
5318 return etype;
5319 }
5320
5321 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5322 type, TYPE, return an expression to test if EXP is in (or out of, depending
5323 on IN_P) the range. Return 0 if the test couldn't be created. */
5324
5325 tree
5326 build_range_check (location_t loc, tree type, tree exp, int in_p,
5327 tree low, tree high)
5328 {
5329 tree etype = TREE_TYPE (exp), mask, value;
5330
5331 /* Disable this optimization for function pointer expressions
5332 on targets that require function pointer canonicalization. */
5333 if (targetm.have_canonicalize_funcptr_for_compare ()
5334 && POINTER_TYPE_P (etype)
5335 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5336 return NULL_TREE;
5337
5338 if (! in_p)
5339 {
5340 value = build_range_check (loc, type, exp, 1, low, high);
5341 if (value != 0)
5342 return invert_truthvalue_loc (loc, value);
5343
5344 return 0;
5345 }
5346
5347 if (low == 0 && high == 0)
5348 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5349
5350 if (low == 0)
5351 return fold_build2_loc (loc, LE_EXPR, type, exp,
5352 fold_convert_loc (loc, etype, high));
5353
5354 if (high == 0)
5355 return fold_build2_loc (loc, GE_EXPR, type, exp,
5356 fold_convert_loc (loc, etype, low));
5357
5358 if (operand_equal_p (low, high, 0))
5359 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5360 fold_convert_loc (loc, etype, low));
5361
5362 if (TREE_CODE (exp) == BIT_AND_EXPR
5363 && maskable_range_p (low, high, etype, &mask, &value))
5364 return fold_build2_loc (loc, EQ_EXPR, type,
5365 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5366 exp, mask),
5367 value);
5368
5369 if (integer_zerop (low))
5370 {
5371 if (! TYPE_UNSIGNED (etype))
5372 {
5373 etype = unsigned_type_for (etype);
5374 high = fold_convert_loc (loc, etype, high);
5375 exp = fold_convert_loc (loc, etype, exp);
5376 }
5377 return build_range_check (loc, type, exp, 1, 0, high);
5378 }
5379
5380 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5381 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5382 {
5383 int prec = TYPE_PRECISION (etype);
5384
5385 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5386 {
5387 if (TYPE_UNSIGNED (etype))
5388 {
5389 tree signed_etype = signed_type_for (etype);
5390 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5391 etype
5392 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5393 else
5394 etype = signed_etype;
5395 exp = fold_convert_loc (loc, etype, exp);
5396 }
5397 return fold_build2_loc (loc, GT_EXPR, type, exp,
5398 build_int_cst (etype, 0));
5399 }
5400 }
5401
5402 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5403 This requires wrap-around arithmetic for the type of the expression. */
5404 etype = range_check_type (etype);
5405 if (etype == NULL_TREE)
5406 return NULL_TREE;
5407
5408 high = fold_convert_loc (loc, etype, high);
5409 low = fold_convert_loc (loc, etype, low);
5410 exp = fold_convert_loc (loc, etype, exp);
5411
5412 value = const_binop (MINUS_EXPR, high, low);
5413
5414 if (value != 0 && !TREE_OVERFLOW (value))
5415 return build_range_check (loc, type,
5416 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5417 1, build_int_cst (etype, 0), value);
5418
5419 return 0;
5420 }
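/* A worked example of the final transformation, assuming 32-bit int: for
   the test 1 <= c && c <= 10 we compute HIGH - LOW == 9 in the unsigned
   range-check type and recurse, so the result is in essence
   (unsigned) (c - 1) <= 9, a single comparison.  */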
5421 \f
5422 /* Return the predecessor of VAL in its type, handling the infinite case. */
5423
5424 static tree
5425 range_predecessor (tree val)
5426 {
5427 tree type = TREE_TYPE (val);
5428
5429 if (INTEGRAL_TYPE_P (type)
5430 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5431 return 0;
5432 else
5433 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5434 build_int_cst (TREE_TYPE (val), 1), 0);
5435 }
5436
5437 /* Return the successor of VAL in its type, handling the infinite case. */
5438
5439 static tree
5440 range_successor (tree val)
5441 {
5442 tree type = TREE_TYPE (val);
5443
5444 if (INTEGRAL_TYPE_P (type)
5445 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5446 return 0;
5447 else
5448 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5449 build_int_cst (TREE_TYPE (val), 1), 0);
5450 }
5451
5452 /* Given two ranges, see if we can merge them into one. Return 1 if we
5453 can, 0 if we can't. Set the output range into the specified parameters. */
5454
5455 bool
5456 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5457 tree high0, int in1_p, tree low1, tree high1)
5458 {
5459 int no_overlap;
5460 int subset;
5461 int temp;
5462 tree tem;
5463 int in_p;
5464 tree low, high;
5465 int lowequal = ((low0 == 0 && low1 == 0)
5466 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5467 low0, 0, low1, 0)));
5468 int highequal = ((high0 == 0 && high1 == 0)
5469 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5470 high0, 1, high1, 1)));
5471
5472 /* Make range 0 be the range that starts first, or ends last if they
5473 start at the same value. Swap them if that is not the case. */
5474 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5475 low0, 0, low1, 0))
5476 || (lowequal
5477 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5478 high1, 1, high0, 1))))
5479 {
5480 temp = in0_p, in0_p = in1_p, in1_p = temp;
5481 tem = low0, low0 = low1, low1 = tem;
5482 tem = high0, high0 = high1, high1 = tem;
5483 }
5484
5485 /* If the second range is != high1 where high1 is the maximum value
5486 of the type, try merging with the < high1 range first. */
5487 if (low1
5488 && high1
5489 && TREE_CODE (low1) == INTEGER_CST
5490 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5491 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5492 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5493 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5494 && operand_equal_p (low1, high1, 0))
5495 {
5496 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5497 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5498 !in1_p, NULL_TREE, range_predecessor (low1)))
5499 return true;
5500 /* Similarly, if the second range is != low1 where low1 is the minimum
5501 value of the type, try merging with the > low1 range first. */
5502 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5503 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5504 !in1_p, range_successor (low1), NULL_TREE))
5505 return true;
5506 }
5507
5508 /* Now flag two cases, whether the ranges are disjoint or whether the
5509 second range is totally subsumed in the first. Note that the tests
5510 below are simplified by the ones above. */
5511 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5512 high0, 1, low1, 0));
5513 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5514 high1, 1, high0, 1));
5515
5516 /* We now have four cases, depending on whether we are including or
5517 excluding the two ranges. */
5518 if (in0_p && in1_p)
5519 {
5520 /* If they don't overlap, the result is false. If the second range
5521 is a subset it is the result. Otherwise, the range is from the start
5522 of the second to the end of the first. */
5523 if (no_overlap)
5524 in_p = 0, low = high = 0;
5525 else if (subset)
5526 in_p = 1, low = low1, high = high1;
5527 else
5528 in_p = 1, low = low1, high = high0;
5529 }
5530
5531 else if (in0_p && ! in1_p)
5532 {
5533 /* If they don't overlap, the result is the first range. If they are
5534 equal, the result is false. If the second range is a subset of the
5535 first, and the ranges begin at the same place, we go from just after
5536 the end of the second range to the end of the first. If the second
5537 range is not a subset of the first, or if it is a subset and both
5538 ranges end at the same place, the range starts at the start of the
5539 first range and ends just before the second range.
5540 Otherwise, we can't describe this as a single range. */
5541 if (no_overlap)
5542 in_p = 1, low = low0, high = high0;
5543 else if (lowequal && highequal)
5544 in_p = 0, low = high = 0;
5545 else if (subset && lowequal)
5546 {
5547 low = range_successor (high1);
5548 high = high0;
5549 in_p = 1;
5550 if (low == 0)
5551 {
5552 /* We are in the weird situation where high0 > high1 but
5553 high1 has no successor. Punt. */
5554 return 0;
5555 }
5556 }
5557 else if (! subset || highequal)
5558 {
5559 low = low0;
5560 high = range_predecessor (low1);
5561 in_p = 1;
5562 if (high == 0)
5563 {
5564 /* low0 < low1 but low1 has no predecessor. Punt. */
5565 return 0;
5566 }
5567 }
5568 else
5569 return 0;
5570 }
5571
5572 else if (! in0_p && in1_p)
5573 {
5574 /* If they don't overlap, the result is the second range. If the second
5575 is a subset of the first, the result is false. Otherwise,
5576 the range starts just after the first range and ends at the
5577 end of the second. */
5578 if (no_overlap)
5579 in_p = 1, low = low1, high = high1;
5580 else if (subset || highequal)
5581 in_p = 0, low = high = 0;
5582 else
5583 {
5584 low = range_successor (high0);
5585 high = high1;
5586 in_p = 1;
5587 if (low == 0)
5588 {
5589 /* high1 > high0 but high0 has no successor. Punt. */
5590 return 0;
5591 }
5592 }
5593 }
5594
5595 else
5596 {
5597 /* The case where we are excluding both ranges. Here the complex case
5598 is if they don't overlap. In that case, the only time we have a
5599 range is if they are adjacent. If the second is a subset of the
5600 first, the result is the first. Otherwise, the range to exclude
5601 starts at the beginning of the first range and ends at the end of the
5602 second. */
5603 if (no_overlap)
5604 {
5605 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5606 range_successor (high0),
5607 1, low1, 0)))
5608 in_p = 0, low = low0, high = high1;
5609 else
5610 {
5611 /* Canonicalize - [min, x] into - [-, x]. */
5612 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5613 switch (TREE_CODE (TREE_TYPE (low0)))
5614 {
5615 case ENUMERAL_TYPE:
5616 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5617 GET_MODE_BITSIZE
5618 (TYPE_MODE (TREE_TYPE (low0)))))
5619 break;
5620 /* FALLTHROUGH */
5621 case INTEGER_TYPE:
5622 if (tree_int_cst_equal (low0,
5623 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5624 low0 = 0;
5625 break;
5626 case POINTER_TYPE:
5627 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5628 && integer_zerop (low0))
5629 low0 = 0;
5630 break;
5631 default:
5632 break;
5633 }
5634
5635 /* Canonicalize - [x, max] into - [x, -]. */
5636 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5637 switch (TREE_CODE (TREE_TYPE (high1)))
5638 {
5639 case ENUMERAL_TYPE:
5640 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5641 GET_MODE_BITSIZE
5642 (TYPE_MODE (TREE_TYPE (high1)))))
5643 break;
5644 /* FALLTHROUGH */
5645 case INTEGER_TYPE:
5646 if (tree_int_cst_equal (high1,
5647 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5648 high1 = 0;
5649 break;
5650 case POINTER_TYPE:
5651 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5652 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5653 high1, 1,
5654 build_int_cst (TREE_TYPE (high1), 1),
5655 1)))
5656 high1 = 0;
5657 break;
5658 default:
5659 break;
5660 }
5661
5662 /* The ranges might also be adjacent between the maximum and
5663 minimum values of the given type. For
5664 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5665 return + [x + 1, y - 1]. */
5666 if (low0 == 0 && high1 == 0)
5667 {
5668 low = range_successor (high0);
5669 high = range_predecessor (low1);
5670 if (low == 0 || high == 0)
5671 return 0;
5672
5673 in_p = 1;
5674 }
5675 else
5676 return 0;
5677 }
5678 }
5679 else if (subset)
5680 in_p = 0, low = low0, high = high0;
5681 else
5682 in_p = 0, low = low0, high = high1;
5683 }
5684
5685 *pin_p = in_p, *plow = low, *phigh = high;
5686 return 1;
5687 }
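/* For instance, merging the two included ranges +[0, 4] and +[3, 9] takes
   the in0_p && in1_p branch with neither disjointness nor a subset, giving
   the single included range +[3, 4], i.e. the intersection of the two
   inputs.  */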
5688 \f
5689
5690 /* Subroutine of fold, looking inside expressions of the form
5691 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5692 of the COND_EXPR. This function is being used also to optimize
5693 A op B ? C : A, by reversing the comparison first.
5694
5695 Return a folded expression whose code is not a COND_EXPR
5696 anymore, or NULL_TREE if no folding opportunity is found. */
5697
5698 static tree
5699 fold_cond_expr_with_comparison (location_t loc, tree type,
5700 tree arg0, tree arg1, tree arg2)
5701 {
5702 enum tree_code comp_code = TREE_CODE (arg0);
5703 tree arg00 = TREE_OPERAND (arg0, 0);
5704 tree arg01 = TREE_OPERAND (arg0, 1);
5705 tree arg1_type = TREE_TYPE (arg1);
5706 tree tem;
5707
5708 STRIP_NOPS (arg1);
5709 STRIP_NOPS (arg2);
5710
5711 /* If we have A op 0 ? A : -A, consider applying the following
5712 transformations:
5713
5714 A == 0? A : -A same as -A
5715 A != 0? A : -A same as A
5716 A >= 0? A : -A same as abs (A)
5717 A > 0? A : -A same as abs (A)
5718 A <= 0? A : -A same as -abs (A)
5719 A < 0? A : -A same as -abs (A)
5720
5721 None of these transformations work for modes with signed
5722 zeros. If A is +/-0, the first two transformations will
5723 change the sign of the result (from +0 to -0, or vice
5724 versa). The last four will fix the sign of the result,
5725 even though the original expressions could be positive or
5726 negative, depending on the sign of A.
5727
5728 Note that all these transformations are correct if A is
5729 NaN, since the two alternatives (A and -A) are also NaNs. */
5730 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5731 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5732 ? real_zerop (arg01)
5733 : integer_zerop (arg01))
5734 && ((TREE_CODE (arg2) == NEGATE_EXPR
5735 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5736 /* In the case that A is of the form X-Y, '-A' (arg2) may
5737 have already been folded to Y-X, check for that. */
5738 || (TREE_CODE (arg1) == MINUS_EXPR
5739 && TREE_CODE (arg2) == MINUS_EXPR
5740 && operand_equal_p (TREE_OPERAND (arg1, 0),
5741 TREE_OPERAND (arg2, 1), 0)
5742 && operand_equal_p (TREE_OPERAND (arg1, 1),
5743 TREE_OPERAND (arg2, 0), 0))))
5744 switch (comp_code)
5745 {
5746 case EQ_EXPR:
5747 case UNEQ_EXPR:
5748 tem = fold_convert_loc (loc, arg1_type, arg1);
5749 return fold_convert_loc (loc, type, negate_expr (tem));
5750 case NE_EXPR:
5751 case LTGT_EXPR:
5752 return fold_convert_loc (loc, type, arg1);
5753 case UNGE_EXPR:
5754 case UNGT_EXPR:
5755 if (flag_trapping_math)
5756 break;
5757 /* Fall through. */
5758 case GE_EXPR:
5759 case GT_EXPR:
5760 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5761 break;
5762 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5763 return fold_convert_loc (loc, type, tem);
5764 case UNLE_EXPR:
5765 case UNLT_EXPR:
5766 if (flag_trapping_math)
5767 break;
5768 /* FALLTHRU */
5769 case LE_EXPR:
5770 case LT_EXPR:
5771 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5772 break;
5773 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5774 return negate_expr (fold_convert_loc (loc, type, tem));
5775 default:
5776 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5777 break;
5778 }
5779
5780 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5781 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5782 both transformations are correct when A is NaN: A != 0
5783 is then true, and A == 0 is false. */
5784
5785 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5786 && integer_zerop (arg01) && integer_zerop (arg2))
5787 {
5788 if (comp_code == NE_EXPR)
5789 return fold_convert_loc (loc, type, arg1);
5790 else if (comp_code == EQ_EXPR)
5791 return build_zero_cst (type);
5792 }
5793
5794 /* Try some transformations of A op B ? A : B.
5795
5796 A == B? A : B same as B
5797 A != B? A : B same as A
5798 A >= B? A : B same as max (A, B)
5799 A > B? A : B same as max (B, A)
5800 A <= B? A : B same as min (A, B)
5801 A < B? A : B same as min (B, A)
5802
5803 As above, these transformations don't work in the presence
5804 of signed zeros. For example, if A and B are zeros of
5805 opposite sign, the first two transformations will change
5806 the sign of the result. In the last four, the original
5807 expressions give different results for (A=+0, B=-0) and
5808 (A=-0, B=+0), but the transformed expressions do not.
5809
5810 The first two transformations are correct if either A or B
5811 is a NaN. In the first transformation, the condition will
5812 be false, and B will indeed be chosen. In the case of the
5813 second transformation, the condition A != B will be true,
5814 and A will be chosen.
5815
5816 The conversions to max() and min() are not correct if B is
5817 a number and A is not. The conditions in the original
5818 expressions will be false, so all four give B. The min()
5819 and max() versions would give a NaN instead. */
5820 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5821 && operand_equal_for_comparison_p (arg01, arg2)
5822 /* Avoid these transformations if the COND_EXPR may be used
5823 as an lvalue in the C++ front-end. PR c++/19199. */
5824 && (in_gimple_form
5825 || VECTOR_TYPE_P (type)
5826 || (! lang_GNU_CXX ()
5827 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5828 || ! maybe_lvalue_p (arg1)
5829 || ! maybe_lvalue_p (arg2)))
5830 {
5831 tree comp_op0 = arg00;
5832 tree comp_op1 = arg01;
5833 tree comp_type = TREE_TYPE (comp_op0);
5834
5835 switch (comp_code)
5836 {
5837 case EQ_EXPR:
5838 return fold_convert_loc (loc, type, arg2);
5839 case NE_EXPR:
5840 return fold_convert_loc (loc, type, arg1);
5841 case LE_EXPR:
5842 case LT_EXPR:
5843 case UNLE_EXPR:
5844 case UNLT_EXPR:
5845 /* In C++ a ?: expression can be an lvalue, so put the
5846 operand which will be used if they are equal first
5847 so that we can convert this back to the
5848 corresponding COND_EXPR. */
5849 if (!HONOR_NANS (arg1))
5850 {
5851 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5852 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5853 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5854 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5855 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5856 comp_op1, comp_op0);
5857 return fold_convert_loc (loc, type, tem);
5858 }
5859 break;
5860 case GE_EXPR:
5861 case GT_EXPR:
5862 case UNGE_EXPR:
5863 case UNGT_EXPR:
5864 if (!HONOR_NANS (arg1))
5865 {
5866 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5867 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5868 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5869 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5870 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5871 comp_op1, comp_op0);
5872 return fold_convert_loc (loc, type, tem);
5873 }
5874 break;
5875 case UNEQ_EXPR:
5876 if (!HONOR_NANS (arg1))
5877 return fold_convert_loc (loc, type, arg2);
5878 break;
5879 case LTGT_EXPR:
5880 if (!HONOR_NANS (arg1))
5881 return fold_convert_loc (loc, type, arg1);
5882 break;
5883 default:
5884 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5885 break;
5886 }
5887 }
5888
5889 return NULL_TREE;
5890 }
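/* Two sketches of the transformations above, assuming signed integer X
   (so signed zeros are not an issue): X >= 0 ? X : -X matches the
   "A op 0 ? A : -A" pattern with GE_EXPR and folds to ABS_EXPR <X>,
   while X < 0 ? X : -X folds to -ABS_EXPR <X>.  */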
5891
5892
5893 \f
5894 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5895 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5896 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5897 false) >= 2)
5898 #endif
5899
5900 /* EXP is some logical combination of boolean tests. See if we can
5901 merge it into some range test. Return the new tree if so. */
5902
5903 static tree
5904 fold_range_test (location_t loc, enum tree_code code, tree type,
5905 tree op0, tree op1)
5906 {
5907 int or_op = (code == TRUTH_ORIF_EXPR
5908 || code == TRUTH_OR_EXPR);
5909 int in0_p, in1_p, in_p;
5910 tree low0, low1, low, high0, high1, high;
5911 bool strict_overflow_p = false;
5912 tree tem, lhs, rhs;
5913 const char * const warnmsg = G_("assuming signed overflow does not occur "
5914 "when simplifying range test");
5915
5916 if (!INTEGRAL_TYPE_P (type))
5917 return 0;
5918
5919 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5920 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5921
5922 /* If this is an OR operation, invert both sides; we will invert
5923 again at the end. */
5924 if (or_op)
5925 in0_p = ! in0_p, in1_p = ! in1_p;
5926
5927 /* If both expressions are the same, if we can merge the ranges, and we
5928 can build the range test, return it or its inversion. If one of the
5929 ranges is always true or always false, consider it to be the same
5930 expression as the other. */
5931 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5932 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5933 in1_p, low1, high1)
5934 && (tem = (build_range_check (loc, type,
5935 lhs != 0 ? lhs
5936 : rhs != 0 ? rhs : integer_zero_node,
5937 in_p, low, high))) != 0)
5938 {
5939 if (strict_overflow_p)
5940 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5941 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5942 }
5943
5944 /* On machines where the branch cost is expensive, if this is a
5945 short-circuited branch and the underlying object on both sides
5946 is the same, make a non-short-circuit operation. */
5947 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
5948 if (param_logical_op_non_short_circuit != -1)
5949 logical_op_non_short_circuit
5950 = param_logical_op_non_short_circuit;
5951 if (logical_op_non_short_circuit
5952 && !flag_sanitize_coverage
5953 && lhs != 0 && rhs != 0
5954 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5955 && operand_equal_p (lhs, rhs, 0))
5956 {
5957 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5958 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5959 which cases we can't do this. */
5960 if (simple_operand_p (lhs))
5961 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5962 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5963 type, op0, op1);
5964
5965 else if (!lang_hooks.decls.global_bindings_p ()
5966 && !CONTAINS_PLACEHOLDER_P (lhs))
5967 {
5968 tree common = save_expr (lhs);
5969
5970 if ((lhs = build_range_check (loc, type, common,
5971 or_op ? ! in0_p : in0_p,
5972 low0, high0)) != 0
5973 && (rhs = build_range_check (loc, type, common,
5974 or_op ? ! in1_p : in1_p,
5975 low1, high1)) != 0)
5976 {
5977 if (strict_overflow_p)
5978 fold_overflow_warning (warnmsg,
5979 WARN_STRICT_OVERFLOW_COMPARISON);
5980 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5981 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5982 type, lhs, rhs);
5983 }
5984 }
5985 }
5986
5987 return 0;
5988 }
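/* For example: ch >= '0' && ch <= '9' yields the ranges +[48, -] and
   +[-, 57]; merge_ranges combines them into +[48, 57] and
   build_range_check then produces, in essence,
   (unsigned) (ch - '0') <= 9.  */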
5989 \f
5990 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5991 bit value. Arrange things so the extra bits will be set to zero if and
5992 only if C is sign-extended to its full width. If MASK is nonzero,
5993 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5994
5995 static tree
5996 unextend (tree c, int p, int unsignedp, tree mask)
5997 {
5998 tree type = TREE_TYPE (c);
5999 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6000 tree temp;
6001
6002 if (p == modesize || unsignedp)
6003 return c;
6004
6005 /* We work by getting just the sign bit into the low-order bit, then
6006 into the high-order bit, then sign-extend. We then XOR that value
6007 with C. */
6008 temp = build_int_cst (TREE_TYPE (c),
6009 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6010
6011 /* We must use a signed type in order to get an arithmetic right shift.
6012 However, we must also avoid introducing accidental overflows, so that
6013 a subsequent call to integer_zerop will work. Hence we must
6014 do the type conversion here. At this point, the constant is either
6015 zero or one, and the conversion to a signed type can never overflow.
6016 We could get an overflow if this conversion is done anywhere else. */
6017 if (TYPE_UNSIGNED (type))
6018 temp = fold_convert (signed_type_for (type), temp);
6019
6020 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6021 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6022 if (mask != 0)
6023 temp = const_binop (BIT_AND_EXPR, temp,
6024 fold_convert (TREE_TYPE (c), mask));
6025 /* If necessary, convert the type back to match the type of C. */
6026 if (TYPE_UNSIGNED (type))
6027 temp = fold_convert (type, temp);
6028
6029 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6030 }
6031 \f
6032 /* For an expression that has the form
6033 (A && B) || ~B
6034 or
6035 (A || B) && ~B,
6036 we can drop one of the inner expressions and simplify to
6037 A || ~B
6038 or
6039 A && ~B
6040 LOC is the location of the resulting expression. OP is the inner
6041 logical operation (the left-hand side in the examples above), while CMPOP
6042 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6043 removing a condition that guards another, as in
6044 (A != NULL && A->...) || A == NULL
6045 which we must not transform. If RHS_ONLY is true, only eliminate the
6046 right-most operand of the inner logical operation. */
6047
6048 static tree
6049 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6050 bool rhs_only)
6051 {
6052 tree type = TREE_TYPE (cmpop);
6053 enum tree_code code = TREE_CODE (cmpop);
6054 enum tree_code truthop_code = TREE_CODE (op);
6055 tree lhs = TREE_OPERAND (op, 0);
6056 tree rhs = TREE_OPERAND (op, 1);
6057 tree orig_lhs = lhs, orig_rhs = rhs;
6058 enum tree_code rhs_code = TREE_CODE (rhs);
6059 enum tree_code lhs_code = TREE_CODE (lhs);
6060 enum tree_code inv_code;
6061
6062 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6063 return NULL_TREE;
6064
6065 if (TREE_CODE_CLASS (code) != tcc_comparison)
6066 return NULL_TREE;
6067
6068 if (rhs_code == truthop_code)
6069 {
6070 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6071 if (newrhs != NULL_TREE)
6072 {
6073 rhs = newrhs;
6074 rhs_code = TREE_CODE (rhs);
6075 }
6076 }
6077 if (lhs_code == truthop_code && !rhs_only)
6078 {
6079 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6080 if (newlhs != NULL_TREE)
6081 {
6082 lhs = newlhs;
6083 lhs_code = TREE_CODE (lhs);
6084 }
6085 }
6086
6087 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6088 if (inv_code == rhs_code
6089 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6090 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6091 return lhs;
6092 if (!rhs_only && inv_code == lhs_code
6093 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6094 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6095 return rhs;
6096 if (rhs != orig_rhs || lhs != orig_lhs)
6097 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6098 lhs, rhs);
6099 return NULL_TREE;
6100 }
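/* For example, with OP = (x > 0 && y > 0), CMPOP = (x <= 0) and RHS_ONLY
   false, the x > 0 term is the inverse of CMPOP and is dropped, so the
   function returns y > 0 and the caller can build (y > 0) || x <= 0.  */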
6101
6102 /* Find ways of folding logical expressions of LHS and RHS:
6103 Try to merge two comparisons to the same innermost item.
6104 Look for range tests like "ch >= '0' && ch <= '9'".
6105 Look for combinations of simple terms on machines with expensive branches
6106 and evaluate the RHS unconditionally.
6107
6108 For example, if we have p->a == 2 && p->b == 4 and we can make an
6109 object large enough to span both A and B, we can do this with a comparison
6110 against the object ANDed with a mask.
6111
6112 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6113 operations to do this with one comparison.
6114
6115 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6116 function and the one above.
6117
6118 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6119 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6120
6121 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6122 two operands.
6123
6124 We return the simplified tree or 0 if no optimization is possible. */
6125
6126 static tree
6127 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6128 tree lhs, tree rhs)
6129 {
6130 /* If this is the "or" of two comparisons, we can do something if
6131 the comparisons are NE_EXPR. If this is the "and", we can do something
6132 if the comparisons are EQ_EXPR. I.e.,
6133 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6134
6135 WANTED_CODE is this operation code. For single bit fields, we can
6136 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6137 comparison for one-bit fields. */
6138
6139 enum tree_code wanted_code;
6140 enum tree_code lcode, rcode;
6141 tree ll_arg, lr_arg, rl_arg, rr_arg;
6142 tree ll_inner, lr_inner, rl_inner, rr_inner;
6143 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6144 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6145 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6146 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6147 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6148 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6149 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6150 scalar_int_mode lnmode, rnmode;
6151 tree ll_mask, lr_mask, rl_mask, rr_mask;
6152 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6153 tree l_const, r_const;
6154 tree lntype, rntype, result;
6155 HOST_WIDE_INT first_bit, end_bit;
6156 int volatilep;
6157
6158 /* Start by getting the comparison codes. Fail if anything is volatile.
6159 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6160 it were surrounded with a NE_EXPR. */
6161
6162 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6163 return 0;
6164
6165 lcode = TREE_CODE (lhs);
6166 rcode = TREE_CODE (rhs);
6167
6168 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6169 {
6170 lhs = build2 (NE_EXPR, truth_type, lhs,
6171 build_int_cst (TREE_TYPE (lhs), 0));
6172 lcode = NE_EXPR;
6173 }
6174
6175 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6176 {
6177 rhs = build2 (NE_EXPR, truth_type, rhs,
6178 build_int_cst (TREE_TYPE (rhs), 0));
6179 rcode = NE_EXPR;
6180 }
6181
6182 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6183 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6184 return 0;
6185
6186 ll_arg = TREE_OPERAND (lhs, 0);
6187 lr_arg = TREE_OPERAND (lhs, 1);
6188 rl_arg = TREE_OPERAND (rhs, 0);
6189 rr_arg = TREE_OPERAND (rhs, 1);
6190
6191 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6192 if (simple_operand_p (ll_arg)
6193 && simple_operand_p (lr_arg))
6194 {
6195 if (operand_equal_p (ll_arg, rl_arg, 0)
6196 && operand_equal_p (lr_arg, rr_arg, 0))
6197 {
6198 result = combine_comparisons (loc, code, lcode, rcode,
6199 truth_type, ll_arg, lr_arg);
6200 if (result)
6201 return result;
6202 }
6203 else if (operand_equal_p (ll_arg, rr_arg, 0)
6204 && operand_equal_p (lr_arg, rl_arg, 0))
6205 {
6206 result = combine_comparisons (loc, code, lcode,
6207 swap_tree_comparison (rcode),
6208 truth_type, ll_arg, lr_arg);
6209 if (result)
6210 return result;
6211 }
6212 }
6213
6214 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6215 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6216
6217 /* If the RHS can be evaluated unconditionally and its operands are
6218 simple, it wins to evaluate the RHS unconditionally on machines
6219 with expensive branches. In this case, this isn't a comparison
6220 that can be merged. */
6221
6222 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6223 false) >= 2
6224 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6225 && simple_operand_p (rl_arg)
6226 && simple_operand_p (rr_arg))
6227 {
6228 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6229 if (code == TRUTH_OR_EXPR
6230 && lcode == NE_EXPR && integer_zerop (lr_arg)
6231 && rcode == NE_EXPR && integer_zerop (rr_arg)
6232 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6233 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6234 return build2_loc (loc, NE_EXPR, truth_type,
6235 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6236 ll_arg, rl_arg),
6237 build_int_cst (TREE_TYPE (ll_arg), 0));
6238
6239 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6240 if (code == TRUTH_AND_EXPR
6241 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6242 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6243 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6244 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6245 return build2_loc (loc, EQ_EXPR, truth_type,
6246 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6247 ll_arg, rl_arg),
6248 build_int_cst (TREE_TYPE (ll_arg), 0));
6249 }
6250
6251 /* See if the comparisons can be merged. Then get all the parameters for
6252 each side. */
6253
6254 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6255 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6256 return 0;
6257
6258 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6259 volatilep = 0;
6260 ll_inner = decode_field_reference (loc, &ll_arg,
6261 &ll_bitsize, &ll_bitpos, &ll_mode,
6262 &ll_unsignedp, &ll_reversep, &volatilep,
6263 &ll_mask, &ll_and_mask);
6264 lr_inner = decode_field_reference (loc, &lr_arg,
6265 &lr_bitsize, &lr_bitpos, &lr_mode,
6266 &lr_unsignedp, &lr_reversep, &volatilep,
6267 &lr_mask, &lr_and_mask);
6268 rl_inner = decode_field_reference (loc, &rl_arg,
6269 &rl_bitsize, &rl_bitpos, &rl_mode,
6270 &rl_unsignedp, &rl_reversep, &volatilep,
6271 &rl_mask, &rl_and_mask);
6272 rr_inner = decode_field_reference (loc, &rr_arg,
6273 &rr_bitsize, &rr_bitpos, &rr_mode,
6274 &rr_unsignedp, &rr_reversep, &volatilep,
6275 &rr_mask, &rr_and_mask);
6276
6277 /* The inner operation on the lhs of each comparison must be the
6278 same if we are to be able to do anything.
6279 Then see if we have constants. If not, the same must be true for
6280 the rhs's. */
6281 if (volatilep
6282 || ll_reversep != rl_reversep
6283 || ll_inner == 0 || rl_inner == 0
6284 || ! operand_equal_p (ll_inner, rl_inner, 0))
6285 return 0;
6286
6287 if (TREE_CODE (lr_arg) == INTEGER_CST
6288 && TREE_CODE (rr_arg) == INTEGER_CST)
6289 {
6290 l_const = lr_arg, r_const = rr_arg;
6291 lr_reversep = ll_reversep;
6292 }
6293 else if (lr_reversep != rr_reversep
6294 || lr_inner == 0 || rr_inner == 0
6295 || ! operand_equal_p (lr_inner, rr_inner, 0))
6296 return 0;
6297 else
6298 l_const = r_const = 0;
6299
6300 /* If either comparison code is not correct for our logical operation,
6301 fail. However, we can convert a one-bit comparison against zero into
6302 the opposite comparison against that bit being set in the field. */
6303
6304 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6305 if (lcode != wanted_code)
6306 {
6307 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6308 {
6309 /* Make the left operand unsigned, since we are only interested
6310 in the value of one bit. Otherwise we are doing the wrong
6311 thing below. */
6312 ll_unsignedp = 1;
6313 l_const = ll_mask;
6314 }
6315 else
6316 return 0;
6317 }
6318
6319 /* This is analogous to the code for l_const above. */
6320 if (rcode != wanted_code)
6321 {
6322 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6323 {
6324 rl_unsignedp = 1;
6325 r_const = rl_mask;
6326 }
6327 else
6328 return 0;
6329 }
6330
6331 /* See if we can find a mode that contains both fields being compared on
6332 the left. If we can't, fail. Otherwise, update all constants and masks
6333 to be relative to a field of that size. */
6334 first_bit = MIN (ll_bitpos, rl_bitpos);
6335 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6336 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6337 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6338 volatilep, &lnmode))
6339 return 0;
6340
6341 lnbitsize = GET_MODE_BITSIZE (lnmode);
6342 lnbitpos = first_bit & ~ (lnbitsize - 1);
6343 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6344 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6345
6346 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6347 {
6348 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6349 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6350 }
6351
6352 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6353 size_int (xll_bitpos));
6354 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6355 size_int (xrl_bitpos));
6356
6357 if (l_const)
6358 {
6359 l_const = fold_convert_loc (loc, lntype, l_const);
6360 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6361 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6362 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6363 fold_build1_loc (loc, BIT_NOT_EXPR,
6364 lntype, ll_mask))))
6365 {
6366 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6367
6368 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6369 }
6370 }
6371 if (r_const)
6372 {
6373 r_const = fold_convert_loc (loc, lntype, r_const);
6374 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6375 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6376 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6377 fold_build1_loc (loc, BIT_NOT_EXPR,
6378 lntype, rl_mask))))
6379 {
6380 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6381
6382 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6383 }
6384 }
6385
6386 /* If the right sides are not constant, do the same for it. Also,
6387 disallow this optimization if a size, signedness or storage order
6388 mismatch occurs between the left and right sides. */
6389 if (l_const == 0)
6390 {
6391 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6392 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6393 || ll_reversep != lr_reversep
6394 /* Make sure the two fields on the right
6395 correspond to the left without being swapped. */
6396 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6397 return 0;
6398
6399 first_bit = MIN (lr_bitpos, rr_bitpos);
6400 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6401 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6402 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6403 volatilep, &rnmode))
6404 return 0;
6405
6406 rnbitsize = GET_MODE_BITSIZE (rnmode);
6407 rnbitpos = first_bit & ~ (rnbitsize - 1);
6408 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6409 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6410
6411 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6412 {
6413 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6414 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6415 }
6416
6417 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6418 rntype, lr_mask),
6419 size_int (xlr_bitpos));
6420 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6421 rntype, rr_mask),
6422 size_int (xrr_bitpos));
6423
6424 /* Make a mask that corresponds to both fields being compared.
6425 Do this for both items being compared. If the operands are the
6426 same size and the bits being compared are in the same position
6427 then we can do this by masking both and comparing the masked
6428 results. */
6429 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6430 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6431 if (lnbitsize == rnbitsize
6432 && xll_bitpos == xlr_bitpos
6433 && lnbitpos >= 0
6434 && rnbitpos >= 0)
6435 {
6436 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6437 lntype, lnbitsize, lnbitpos,
6438 ll_unsignedp || rl_unsignedp, ll_reversep);
6439 if (! all_ones_mask_p (ll_mask, lnbitsize))
6440 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6441
6442 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6443 rntype, rnbitsize, rnbitpos,
6444 lr_unsignedp || rr_unsignedp, lr_reversep);
6445 if (! all_ones_mask_p (lr_mask, rnbitsize))
6446 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6447
6448 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6449 }
6450
6451 /* There is still another way we can do something: If both pairs of
6452 fields being compared are adjacent, we may be able to make a wider
6453 field containing them both.
6454
6455 Note that we still must mask the lhs/rhs expressions. Furthermore,
6456 the mask must be shifted to account for the shift done by
6457 make_bit_field_ref. */
6458 if (((ll_bitsize + ll_bitpos == rl_bitpos
6459 && lr_bitsize + lr_bitpos == rr_bitpos)
6460 || (ll_bitpos == rl_bitpos + rl_bitsize
6461 && lr_bitpos == rr_bitpos + rr_bitsize))
6462 && ll_bitpos >= 0
6463 && rl_bitpos >= 0
6464 && lr_bitpos >= 0
6465 && rr_bitpos >= 0)
6466 {
6467 tree type;
6468
6469 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6470 ll_bitsize + rl_bitsize,
6471 MIN (ll_bitpos, rl_bitpos),
6472 ll_unsignedp, ll_reversep);
6473 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6474 lr_bitsize + rr_bitsize,
6475 MIN (lr_bitpos, rr_bitpos),
6476 lr_unsignedp, lr_reversep);
6477
6478 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6479 size_int (MIN (xll_bitpos, xrl_bitpos)));
6480 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6481 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6482
6483 /* Convert to the smaller type before masking out unwanted bits. */
6484 type = lntype;
6485 if (lntype != rntype)
6486 {
6487 if (lnbitsize > rnbitsize)
6488 {
6489 lhs = fold_convert_loc (loc, rntype, lhs);
6490 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6491 type = rntype;
6492 }
6493 else if (lnbitsize < rnbitsize)
6494 {
6495 rhs = fold_convert_loc (loc, lntype, rhs);
6496 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6497 type = lntype;
6498 }
6499 }
6500
6501 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6502 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6503
6504 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6505 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6506
6507 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6508 }
6509
6510 return 0;
6511 }
6512
6513 /* Handle the case of comparisons with constants. If there is something in
6514 common between the masks, those bits of the constants must be the same.
6515 If not, the condition is always false. Test for this to avoid generating
6516 incorrect code below. */
6517 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6518 if (! integer_zerop (result)
6519 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6520 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6521 {
6522 if (wanted_code == NE_EXPR)
6523 {
6524 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6525 return constant_boolean_node (true, truth_type);
6526 }
6527 else
6528 {
6529 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6530 return constant_boolean_node (false, truth_type);
6531 }
6532 }
6533
6534 if (lnbitpos < 0)
6535 return 0;
6536
6537 /* Construct the expression we will return. First get the component
6538 reference we will make. Unless the mask is all ones the width of
6539 that field, perform the mask operation. Then compare with the
6540 merged constant. */
6541 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6542 lntype, lnbitsize, lnbitpos,
6543 ll_unsignedp || rl_unsignedp, ll_reversep);
6544
6545 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6546 if (! all_ones_mask_p (ll_mask, lnbitsize))
6547 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6548
6549 return build2_loc (loc, wanted_code, truth_type, result,
6550 const_binop (BIT_IOR_EXPR, l_const, r_const));
6551 }
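/* As a concrete sketch of the constant case: for adjacent unsigned
   bit-fields a:4 and b:4, p->a == 2 && p->b == 4 is rewritten as one
   bit-field load covering both fields, optionally masked, and compared
   against the merged constant (the two 4-bit constants shifted into
   place and IOR'd together).  */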
6552 \f
6553 /* T is an integer expression that is being multiplied by, divided by, or
6554 taken modulo a constant C (CODE says which operation and what kind of
6555 divide or modulus). See if we can eliminate that operation by folding it with
6556 other operations already in T. WIDE_TYPE, if non-null, is a type that
6557 should be used for the computation if wider than our type.
6558
6559 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6560 (X * 2) + (Y * 4). We must, however, be assured that either the original
6561 expression would not overflow or that overflow is undefined for the type
6562 in the language in question.
6563
6564 If we return a non-null expression, it is an equivalent form of the
6565 original computation, but need not be in the original type.
6566
6567 We set *STRICT_OVERFLOW_P to true if the return value depends on
6568 signed overflow being undefined. Otherwise we do not change
6569 *STRICT_OVERFLOW_P. */
6570
6571 static tree
6572 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6573 bool *strict_overflow_p)
6574 {
6575 /* To avoid exponential search depth, refuse to allow recursion past
6576 three levels. Beyond that (1) it's highly unlikely that we'll find
6577 something interesting and (2) we've probably processed it before
6578 when we built the inner expression. */
6579
6580 static int depth;
6581 tree ret;
6582
6583 if (depth > 3)
6584 return NULL;
6585
6586 depth++;
6587 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6588 depth--;
6589
6590 return ret;
6591 }
6592
6593 static tree
6594 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6595 bool *strict_overflow_p)
6596 {
6597 tree type = TREE_TYPE (t);
6598 enum tree_code tcode = TREE_CODE (t);
6599 tree ctype = (wide_type != 0
6600 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6601 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6602 ? wide_type : type);
6603 tree t1, t2;
6604 int same_p = tcode == code;
6605 tree op0 = NULL_TREE, op1 = NULL_TREE;
6606 bool sub_strict_overflow_p;
6607
6608 /* Don't deal with constants of zero here; they confuse the code below. */
6609 if (integer_zerop (c))
6610 return NULL_TREE;
6611
6612 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6613 op0 = TREE_OPERAND (t, 0);
6614
6615 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6616 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6617
6618 /* Note that we need not handle conditional operations here since fold
6619 already handles those cases. So just do arithmetic here. */
6620 switch (tcode)
6621 {
6622 case INTEGER_CST:
6623 /* For a constant, we can always simplify if we are a multiply
6624 or (for divide and modulus) if it is a multiple of our constant. */
6625 if (code == MULT_EXPR
6626 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6627 TYPE_SIGN (type)))
6628 {
6629 tree tem = const_binop (code, fold_convert (ctype, t),
6630 fold_convert (ctype, c));
6631 /* If the multiplication overflowed, we lost information on it.
6632 See PR68142 and PR69845. */
6633 if (TREE_OVERFLOW (tem))
6634 return NULL_TREE;
6635 return tem;
6636 }
6637 break;
6638
6639 CASE_CONVERT: case NON_LVALUE_EXPR:
6640 /* If op0 is an expression ... */
6641 if ((COMPARISON_CLASS_P (op0)
6642 || UNARY_CLASS_P (op0)
6643 || BINARY_CLASS_P (op0)
6644 || VL_EXP_CLASS_P (op0)
6645 || EXPRESSION_CLASS_P (op0))
6646 /* ... and has wrapping overflow, and its type is smaller
6647 than ctype, then we cannot pass through as widening. */
6648 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6649 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6650 && (TYPE_PRECISION (ctype)
6651 > TYPE_PRECISION (TREE_TYPE (op0))))
6652 /* ... or this is a truncation (t is narrower than op0),
6653 then we cannot pass through this narrowing. */
6654 || (TYPE_PRECISION (type)
6655 < TYPE_PRECISION (TREE_TYPE (op0)))
6656 /* ... or signedness changes for division or modulus,
6657 then we cannot pass through this conversion. */
6658 || (code != MULT_EXPR
6659 && (TYPE_UNSIGNED (ctype)
6660 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6661 /* ... or has undefined overflow while the type being
6662 converted to has not, we cannot do the operation in the inner type
6663 as that would introduce undefined overflow. */
6664 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6665 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6666 && !TYPE_OVERFLOW_UNDEFINED (type))))
6667 break;
6668
6669 /* Pass the constant down and see if we can make a simplification. If
6670 we can, replace this expression with the inner simplification for
6671 possible later conversion to our type or some other type. */
6672 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6673 && TREE_CODE (t2) == INTEGER_CST
6674 && !TREE_OVERFLOW (t2)
6675 && (t1 = extract_muldiv (op0, t2, code,
6676 code == MULT_EXPR ? ctype : NULL_TREE,
6677 strict_overflow_p)) != 0)
6678 return t1;
6679 break;
6680
6681 case ABS_EXPR:
6682 /* If widening the type changes it from signed to unsigned, then we
6683 must avoid building ABS_EXPR itself as unsigned. */
6684 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6685 {
6686 tree cstype = (*signed_type_for) (ctype);
6687 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6688 != 0)
6689 {
6690 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6691 return fold_convert (ctype, t1);
6692 }
6693 break;
6694 }
6695 /* If the constant is negative, we cannot simplify this. */
6696 if (tree_int_cst_sgn (c) == -1)
6697 break;
6698 /* FALLTHROUGH */
6699 case NEGATE_EXPR:
6700 /* For division and modulus, type can't be unsigned, as e.g.
6701 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6702 For signed types, even with wrapping overflow, this is fine. */
6703 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6704 break;
6705 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6706 != 0)
6707 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6708 break;
6709
6710 case MIN_EXPR: case MAX_EXPR:
6711 /* If widening the type changes the signedness, then we can't perform
6712 this optimization as that changes the result. */
6713 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6714 break;
6715
6716 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6717 sub_strict_overflow_p = false;
6718 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6719 &sub_strict_overflow_p)) != 0
6720 && (t2 = extract_muldiv (op1, c, code, wide_type,
6721 &sub_strict_overflow_p)) != 0)
6722 {
6723 if (tree_int_cst_sgn (c) < 0)
6724 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6725 if (sub_strict_overflow_p)
6726 *strict_overflow_p = true;
6727 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6728 fold_convert (ctype, t2));
6729 }
6730 break;
6731
6732 case LSHIFT_EXPR: case RSHIFT_EXPR:
6733 /* If the second operand is constant, this is a multiplication
6734 or floor division, by a power of two, so we can treat it that
6735 way unless the multiplier or divisor overflows. Signed
6736 left-shift overflow is implementation-defined rather than
6737 undefined in C90, so do not convert signed left shift into
6738 multiplication. */
6739 if (TREE_CODE (op1) == INTEGER_CST
6740 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6741 /* const_binop may not detect overflow correctly,
6742 so check for it explicitly here. */
6743 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6744 wi::to_wide (op1))
6745 && (t1 = fold_convert (ctype,
6746 const_binop (LSHIFT_EXPR, size_one_node,
6747 op1))) != 0
6748 && !TREE_OVERFLOW (t1))
6749 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6750 ? MULT_EXPR : FLOOR_DIV_EXPR,
6751 ctype,
6752 fold_convert (ctype, op0),
6753 t1),
6754 c, code, wide_type, strict_overflow_p);
6755 break;
6756
6757 case PLUS_EXPR: case MINUS_EXPR:
6758 /* See if we can eliminate the operation on both sides. If we can, we
6759 can return a new PLUS or MINUS. If we can't, the only remaining
6760 cases where we can do anything are if the second operand is a
6761 constant. */
6762 sub_strict_overflow_p = false;
6763 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6764 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6765 if (t1 != 0 && t2 != 0
6766 && TYPE_OVERFLOW_WRAPS (ctype)
6767 && (code == MULT_EXPR
6768 /* If not multiplication, we can only do this if both operands
6769 are divisible by c. */
6770 || (multiple_of_p (ctype, op0, c)
6771 && multiple_of_p (ctype, op1, c))))
6772 {
6773 if (sub_strict_overflow_p)
6774 *strict_overflow_p = true;
6775 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6776 fold_convert (ctype, t2));
6777 }
6778
6779 /* If this was a subtraction, negate OP1 and set it to be an addition.
6780 This simplifies the logic below. */
6781 if (tcode == MINUS_EXPR)
6782 {
6783 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6784 /* If OP1 was not easily negatable, the constant may be OP0. */
6785 if (TREE_CODE (op0) == INTEGER_CST)
6786 {
6787 std::swap (op0, op1);
6788 std::swap (t1, t2);
6789 }
6790 }
6791
6792 if (TREE_CODE (op1) != INTEGER_CST)
6793 break;
6794
6795 /* If either OP1 or C are negative, this optimization is not safe for
6796 some of the division and remainder types while for others we need
6797 to change the code. */
6798 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6799 {
6800 if (code == CEIL_DIV_EXPR)
6801 code = FLOOR_DIV_EXPR;
6802 else if (code == FLOOR_DIV_EXPR)
6803 code = CEIL_DIV_EXPR;
6804 else if (code != MULT_EXPR
6805 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6806 break;
6807 }
6808
6809 /* If it's a multiply or a division/modulus operation of a multiple
6810 of our constant, do the operation and verify it doesn't overflow. */
6811 if (code == MULT_EXPR
6812 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6813 TYPE_SIGN (type)))
6814 {
6815 op1 = const_binop (code, fold_convert (ctype, op1),
6816 fold_convert (ctype, c));
6817 /* We allow the constant to overflow with wrapping semantics. */
6818 if (op1 == 0
6819 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6820 break;
6821 }
6822 else
6823 break;
6824
6825 /* If we have an unsigned type, we cannot widen the operation since it
6826 will change the result if the original computation overflowed. */
6827 if (TYPE_UNSIGNED (ctype) && ctype != type)
6828 break;
6829
6830 /* The last case is if we are a multiply. In that case, we can
6831 apply the distributive law to commute the multiply and addition
6832 if the multiplication of the constants doesn't overflow
6833 and overflow is defined. With undefined overflow
6834 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6835 But fold_plusminus_mult_expr would factor back any power-of-two
6836 value so do not distribute in the first place in this case. */
6837 if (code == MULT_EXPR
6838 && TYPE_OVERFLOW_WRAPS (ctype)
6839 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6840 return fold_build2 (tcode, ctype,
6841 fold_build2 (code, ctype,
6842 fold_convert (ctype, op0),
6843 fold_convert (ctype, c)),
6844 op1);
6845
6846 break;
6847
6848 case MULT_EXPR:
6849 /* We have a special case here if we are doing something like
6850 (C * 8) % 4 since we know that's zero. */
6851 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6852 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6853 /* If the multiplication can overflow we cannot optimize this. */
6854 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6855 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6856 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6857 TYPE_SIGN (type)))
6858 {
6859 *strict_overflow_p = true;
6860 return omit_one_operand (type, integer_zero_node, op0);
6861 }
6862
6863 /* ... fall through ... */
6864
6865 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6866 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6867 /* If we can extract our operation from the LHS, do so and return a
6868 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6869 do something only if the second operand is a constant. */
6870 if (same_p
6871 && TYPE_OVERFLOW_WRAPS (ctype)
6872 && (t1 = extract_muldiv (op0, c, code, wide_type,
6873 strict_overflow_p)) != 0)
6874 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6875 fold_convert (ctype, op1));
6876 else if (tcode == MULT_EXPR && code == MULT_EXPR
6877 && TYPE_OVERFLOW_WRAPS (ctype)
6878 && (t1 = extract_muldiv (op1, c, code, wide_type,
6879 strict_overflow_p)) != 0)
6880 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6881 fold_convert (ctype, t1));
6882 else if (TREE_CODE (op1) != INTEGER_CST)
6883 return 0;
6884
6885 /* If these are the same operation types, we can associate them
6886 assuming no overflow. */
6887 if (tcode == code)
6888 {
6889 bool overflow_p = false;
6890 wi::overflow_type overflow_mul;
6891 signop sign = TYPE_SIGN (ctype);
6892 unsigned prec = TYPE_PRECISION (ctype);
6893 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6894 wi::to_wide (c, prec),
6895 sign, &overflow_mul);
6896 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6897 if (overflow_mul
6898 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6899 overflow_p = true;
6900 if (!overflow_p)
6901 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6902 wide_int_to_tree (ctype, mul));
6903 }
6904
6905 /* If these operations "cancel" each other, we have the main
6906 optimizations of this pass, which occur when either constant is a
6907 multiple of the other, in which case we replace this with either an
6908 operation of CODE or TCODE.
6909
6910 If we have an unsigned type, we cannot do this since it will change
6911 the result if the original computation overflowed. */
6912 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6913 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6914 || (tcode == MULT_EXPR
6915 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6916 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6917 && code != MULT_EXPR)))
6918 {
6919 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6920 TYPE_SIGN (type)))
6921 {
6922 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6923 *strict_overflow_p = true;
6924 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6925 fold_convert (ctype,
6926 const_binop (TRUNC_DIV_EXPR,
6927 op1, c)));
6928 }
6929 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6930 TYPE_SIGN (type)))
6931 {
6932 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6933 *strict_overflow_p = true;
6934 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6935 fold_convert (ctype,
6936 const_binop (TRUNC_DIV_EXPR,
6937 c, op1)));
6938 }
6939 }
6940 break;
6941
6942 default:
6943 break;
6944 }
6945
6946 return 0;
6947 }
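/* A worked instance of the "cancel" case, assuming signed X with undefined
   overflow: extract_muldiv on T = X * 6 with C = 2 and
   CODE = TRUNC_DIV_EXPR reaches the MULT_EXPR/division handling, sees that
   6 is a multiple of 2, sets *STRICT_OVERFLOW_P and returns X * 3.  */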
6948 \f
6949 /* Return a node which has the indicated constant VALUE (either 0 or
6950 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6951 and is of the indicated TYPE. */
6952
6953 tree
6954 constant_boolean_node (bool value, tree type)
6955 {
6956 if (type == integer_type_node)
6957 return value ? integer_one_node : integer_zero_node;
6958 else if (type == boolean_type_node)
6959 return value ? boolean_true_node : boolean_false_node;
6960 else if (TREE_CODE (type) == VECTOR_TYPE)
6961 return build_vector_from_val (type,
6962 build_int_cst (TREE_TYPE (type),
6963 value ? -1 : 0));
6964 else
6965 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6966 }
6967
6968
6969 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6970 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6971 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6972 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6973 COND is the first argument to CODE; otherwise (as in the example
6974 given here), it is the second argument. TYPE is the type of the
6975 original expression. Return NULL_TREE if no simplification is
6976 possible. */
6977
6978 static tree
6979 fold_binary_op_with_conditional_arg (location_t loc,
6980 enum tree_code code,
6981 tree type, tree op0, tree op1,
6982 tree cond, tree arg, int cond_first_p)
6983 {
6984 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6985 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6986 tree test, true_value, false_value;
6987 tree lhs = NULL_TREE;
6988 tree rhs = NULL_TREE;
6989 enum tree_code cond_code = COND_EXPR;
6990
6991 /* Do not move possibly trapping operations into the conditional as this
6992 pessimizes code and causes gimplification issues when applied late. */
6993 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
6994 ANY_INTEGRAL_TYPE_P (type)
6995 && TYPE_OVERFLOW_TRAPS (type), op1))
6996 return NULL_TREE;
6997
6998 if (TREE_CODE (cond) == COND_EXPR
6999 || TREE_CODE (cond) == VEC_COND_EXPR)
7000 {
7001 test = TREE_OPERAND (cond, 0);
7002 true_value = TREE_OPERAND (cond, 1);
7003 false_value = TREE_OPERAND (cond, 2);
7004 /* If this operand has void type (e.g. it is a throw expression),
7005 then it does not make sense to try to perform a logical or
7006 arithmetic operation involving it. */
7007 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7008 lhs = true_value;
7009 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7010 rhs = false_value;
7011 }
7012 else if (!(TREE_CODE (type) != VECTOR_TYPE
7013 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7014 {
7015 tree testtype = TREE_TYPE (cond);
7016 test = cond;
7017 true_value = constant_boolean_node (true, testtype);
7018 false_value = constant_boolean_node (false, testtype);
7019 }
7020 else
7021 /* Detect the case of mixing vector and scalar types - bail out. */
7022 return NULL_TREE;
7023
7024 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7025 cond_code = VEC_COND_EXPR;
7026
7027 /* This transformation is only worthwhile if we don't have to wrap ARG
7028 in a SAVE_EXPR and the operation can be simplified without recursing
7029 on at least one of the branches once it's pushed inside the COND_EXPR. */
7030 if (!TREE_CONSTANT (arg)
7031 && (TREE_SIDE_EFFECTS (arg)
7032 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7033 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7034 return NULL_TREE;
7035
7036 arg = fold_convert_loc (loc, arg_type, arg);
7037 if (lhs == 0)
7038 {
7039 true_value = fold_convert_loc (loc, cond_type, true_value);
7040 if (cond_first_p)
7041 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7042 else
7043 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7044 }
7045 if (rhs == 0)
7046 {
7047 false_value = fold_convert_loc (loc, cond_type, false_value);
7048 if (cond_first_p)
7049 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7050 else
7051 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7052 }
7053
7054 /* Check that we have simplified at least one of the branches. */
7055 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7056 return NULL_TREE;
7057
7058 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7059 }
7060
7061 \f
7062 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7063
7064 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7065 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7066 ADDEND is the same as X.
7067
7068 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7069 and finite. The problematic cases are when X is zero, and its mode
7070 has signed zeros. In the case of rounding towards -infinity,
7071 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7072 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7073
7074 bool
7075 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7076 {
7077 if (!real_zerop (addend))
7078 return false;
7079
7080 /* Don't allow the fold with -fsignaling-nans. */
7081 if (HONOR_SNANS (type))
7082 return false;
7083
7084 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7085 if (!HONOR_SIGNED_ZEROS (type))
7086 return true;
7087
7088 /* There is no case that is safe for all rounding modes. */
7089 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7090 return false;
7091
7092 /* In a vector or complex, we would need to check the sign of all zeros. */
7093 if (TREE_CODE (addend) == VECTOR_CST)
7094 addend = uniform_vector_p (addend);
7095 if (!addend || TREE_CODE (addend) != REAL_CST)
7096 return false;
7097
7098 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7099 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7100 negate = !negate;
7101
7102 /* The mode has signed zeros, and we have to honor their sign.
7103 In this situation, there is only one case we can return true for.
7104 X - 0 is the same as X with default rounding. */
7105 return negate;
7106 }
7107
7108 /* Subroutine of match.pd that optimizes comparisons of a division by
7109 a nonzero integer constant against an integer constant, i.e.
7110 X/C1 op C2.
7111
7112 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7113 GE_EXPR or LE_EXPR. C1 and C2 must each be an INTEGER_CST. */
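/* For example (illustrative): with C1 = 4, C2 = 3 and an unsigned type,
   X / 4 == 3 holds exactly when 12 <= X && X <= 15, so *LO is set to 12,
   *HI to 15, and EQ_EXPR is returned unchanged.  */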
7114
7115 enum tree_code
7116 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7117 tree *hi, bool *neg_overflow)
7118 {
7119 tree prod, tmp, type = TREE_TYPE (c1);
7120 signop sign = TYPE_SIGN (type);
7121 wi::overflow_type overflow;
7122
7123 /* We have to do this the hard way to detect unsigned overflow.
7124 prod = int_const_binop (MULT_EXPR, c1, c2); */
7125 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7126 prod = force_fit_type (type, val, -1, overflow);
7127 *neg_overflow = false;
7128
7129 if (sign == UNSIGNED)
7130 {
7131 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7132 *lo = prod;
7133
7134 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7135 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7136 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7137 }
7138 else if (tree_int_cst_sgn (c1) >= 0)
7139 {
7140 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7141 switch (tree_int_cst_sgn (c2))
7142 {
7143 case -1:
7144 *neg_overflow = true;
7145 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7146 *hi = prod;
7147 break;
7148
7149 case 0:
7150 *lo = fold_negate_const (tmp, type);
7151 *hi = tmp;
7152 break;
7153
7154 case 1:
7155 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7156 *lo = prod;
7157 break;
7158
7159 default:
7160 gcc_unreachable ();
7161 }
7162 }
7163 else
7164 {
7165 /* A negative divisor reverses the relational operators. */
7166 code = swap_tree_comparison (code);
7167
7168 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7169 switch (tree_int_cst_sgn (c2))
7170 {
7171 case -1:
7172 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7173 *lo = prod;
7174 break;
7175
7176 case 0:
7177 *hi = fold_negate_const (tmp, type);
7178 *lo = tmp;
7179 break;
7180
7181 case 1:
7182 *neg_overflow = true;
7183 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7184 *hi = prod;
7185 break;
7186
7187 default:
7188 gcc_unreachable ();
7189 }
7190 }
7191
7192 if (code != EQ_EXPR && code != NE_EXPR)
7193 return code;
7194
7195 if (TREE_OVERFLOW (*lo)
7196 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7197 *lo = NULL_TREE;
7198 if (TREE_OVERFLOW (*hi)
7199 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7200 *hi = NULL_TREE;
7201
7202 return code;
7203 }
7204
7205
7206 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7207 equality/inequality test, then return a simplified form of the test
7208 using a sign test. Otherwise return NULL. TYPE is the desired
7209 result type. */
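/* For instance (illustrative), for a 32-bit signed A the test
   (A & 0x80000000) != 0 masks exactly the sign bit, so it can be
   rewritten as A < 0, and (A & 0x80000000) == 0 as A >= 0.  */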
7210
7211 static tree
7212 fold_single_bit_test_into_sign_test (location_t loc,
7213 enum tree_code code, tree arg0, tree arg1,
7214 tree result_type)
7215 {
7216 /* If this is testing a single bit, we can optimize the test. */
7217 if ((code == NE_EXPR || code == EQ_EXPR)
7218 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7219 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7220 {
7221 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7222 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7223 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7224
7225 if (arg00 != NULL_TREE
7226 /* This is only a win if casting to a signed type is cheap,
7227 i.e. when arg00's type is not a partial mode. */
7228 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7229 {
7230 tree stype = signed_type_for (TREE_TYPE (arg00));
7231 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7232 result_type,
7233 fold_convert_loc (loc, stype, arg00),
7234 build_int_cst (stype, 0));
7235 }
7236 }
7237
7238 return NULL_TREE;
7239 }
7240
7241 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7242 equality/inequality test, then return a simplified form of
7243 the test using shifts and logical operations. Otherwise return
7244 NULL. TYPE is the desired result type. */
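/* For example (illustrative), (x & 0x10) != 0 becomes (x >> 4) & 1 and
   (x & 0x10) == 0 becomes ((x >> 4) ^ 1) & 1; if the masked operand is
   itself a right shift, as in ((x >> 2) & 4) != 0, the shift amounts
   are combined to give (x >> 4) & 1.  */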
7245
7246 tree
7247 fold_single_bit_test (location_t loc, enum tree_code code,
7248 tree arg0, tree arg1, tree result_type)
7249 {
7250 /* If this is testing a single bit, we can optimize the test. */
7251 if ((code == NE_EXPR || code == EQ_EXPR)
7252 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7253 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7254 {
7255 tree inner = TREE_OPERAND (arg0, 0);
7256 tree type = TREE_TYPE (arg0);
7257 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7258 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7259 int ops_unsigned;
7260 tree signed_type, unsigned_type, intermediate_type;
7261 tree tem, one;
7262
7263 /* First, see if we can fold the single bit test into a sign-bit
7264 test. */
7265 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7266 result_type);
7267 if (tem)
7268 return tem;
7269
7270 /* Otherwise we have (A & C) != 0 where C is a single bit,
7271 convert that into ((A >> C2) & 1), where C2 = log2(C).
7272 Similarly for (A & C) == 0. */
7273
7274 /* If INNER is a right shift of a constant and it plus BITNUM does
7275 not overflow, adjust BITNUM and INNER. */
7276 if (TREE_CODE (inner) == RSHIFT_EXPR
7277 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7278 && bitnum < TYPE_PRECISION (type)
7279 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7280 TYPE_PRECISION (type) - bitnum))
7281 {
7282 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7283 inner = TREE_OPERAND (inner, 0);
7284 }
7285
7286 /* If we are going to be able to omit the AND below, we must do our
7287 operations as unsigned. If we must use the AND, we have a choice.
7288 Normally unsigned is faster, but for some machines signed is. */
7289 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7290 && !flag_syntax_only) ? 0 : 1;
7291
7292 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7293 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7294 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7295 inner = fold_convert_loc (loc, intermediate_type, inner);
7296
7297 if (bitnum != 0)
7298 inner = build2 (RSHIFT_EXPR, intermediate_type,
7299 inner, size_int (bitnum));
7300
7301 one = build_int_cst (intermediate_type, 1);
7302
7303 if (code == EQ_EXPR)
7304 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7305
7306 /* Put the AND last so it can combine with more things. */
7307 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7308
7309 /* Make sure to return the proper type. */
7310 inner = fold_convert_loc (loc, result_type, inner);
7311
7312 return inner;
7313 }
7314 return NULL_TREE;
7315 }
7316
7317 /* Test whether it is preferable to swap two operands, ARG0 and
7318 ARG1, for example because ARG0 is an integer constant and ARG1
7319 isn't. */
7320
7321 bool
7322 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7323 {
7324 if (CONSTANT_CLASS_P (arg1))
7325 return 0;
7326 if (CONSTANT_CLASS_P (arg0))
7327 return 1;
7328
7329 STRIP_NOPS (arg0);
7330 STRIP_NOPS (arg1);
7331
7332 if (TREE_CONSTANT (arg1))
7333 return 0;
7334 if (TREE_CONSTANT (arg0))
7335 return 1;
7336
7337 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7338 for commutative and comparison operators. Ensuring a canonical
7339 form allows the optimizers to find additional redundancies without
7340 having to explicitly check for both orderings. */
7341 if (TREE_CODE (arg0) == SSA_NAME
7342 && TREE_CODE (arg1) == SSA_NAME
7343 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7344 return 1;
7345
7346 /* Put SSA_NAMEs last. */
7347 if (TREE_CODE (arg1) == SSA_NAME)
7348 return 0;
7349 if (TREE_CODE (arg0) == SSA_NAME)
7350 return 1;
7351
7352 /* Put variables last. */
7353 if (DECL_P (arg1))
7354 return 0;
7355 if (DECL_P (arg0))
7356 return 1;
7357
7358 return 0;
7359 }
7360
7361
7362 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7363 means A >= Y && A != MAX, but in this case we know that
7364 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
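/* For instance (illustrative), given the bound i < n, the inequality
   i + 1 > j is folded to i >= j, since i < n guarantees that i is not
   the maximum value of its type and so i + 1 cannot wrap.  */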
7365
7366 static tree
7367 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7368 {
7369 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7370
7371 if (TREE_CODE (bound) == LT_EXPR)
7372 a = TREE_OPERAND (bound, 0);
7373 else if (TREE_CODE (bound) == GT_EXPR)
7374 a = TREE_OPERAND (bound, 1);
7375 else
7376 return NULL_TREE;
7377
7378 typea = TREE_TYPE (a);
7379 if (!INTEGRAL_TYPE_P (typea)
7380 && !POINTER_TYPE_P (typea))
7381 return NULL_TREE;
7382
7383 if (TREE_CODE (ineq) == LT_EXPR)
7384 {
7385 a1 = TREE_OPERAND (ineq, 1);
7386 y = TREE_OPERAND (ineq, 0);
7387 }
7388 else if (TREE_CODE (ineq) == GT_EXPR)
7389 {
7390 a1 = TREE_OPERAND (ineq, 0);
7391 y = TREE_OPERAND (ineq, 1);
7392 }
7393 else
7394 return NULL_TREE;
7395
7396 if (TREE_TYPE (a1) != typea)
7397 return NULL_TREE;
7398
7399 if (POINTER_TYPE_P (typea))
7400 {
7401 /* Convert the pointer types into integers before taking the difference. */
7402 tree ta = fold_convert_loc (loc, ssizetype, a);
7403 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7404 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7405 }
7406 else
7407 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7408
7409 if (!diff || !integer_onep (diff))
7410 return NULL_TREE;
7411
7412 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7413 }
7414
7415 /* Fold a sum or difference of at least one multiplication.
7416 Returns the folded tree or NULL if no simplification could be made. */
7417
7418 static tree
7419 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7420 tree arg0, tree arg1)
7421 {
7422 tree arg00, arg01, arg10, arg11;
7423 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7424
7425 /* (A * C) +- (B * C) -> (A+-B) * C.
7426 (A * C) +- A -> A * (C+-1).
7427 We are most concerned about the case where C is a constant,
7428 but other combinations show up during loop reduction. Since
7429 it is not difficult, try all four possibilities. */
7430
7431 if (TREE_CODE (arg0) == MULT_EXPR)
7432 {
7433 arg00 = TREE_OPERAND (arg0, 0);
7434 arg01 = TREE_OPERAND (arg0, 1);
7435 }
7436 else if (TREE_CODE (arg0) == INTEGER_CST)
7437 {
7438 arg00 = build_one_cst (type);
7439 arg01 = arg0;
7440 }
7441 else
7442 {
7443 /* We cannot generate constant 1 for fract. */
7444 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7445 return NULL_TREE;
7446 arg00 = arg0;
7447 arg01 = build_one_cst (type);
7448 }
7449 if (TREE_CODE (arg1) == MULT_EXPR)
7450 {
7451 arg10 = TREE_OPERAND (arg1, 0);
7452 arg11 = TREE_OPERAND (arg1, 1);
7453 }
7454 else if (TREE_CODE (arg1) == INTEGER_CST)
7455 {
7456 arg10 = build_one_cst (type);
7457 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7458 the purpose of this canonicalization. */
7459 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7460 && negate_expr_p (arg1)
7461 && code == PLUS_EXPR)
7462 {
7463 arg11 = negate_expr (arg1);
7464 code = MINUS_EXPR;
7465 }
7466 else
7467 arg11 = arg1;
7468 }
7469 else
7470 {
7471 /* We cannot generate constant 1 for fract. */
7472 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7473 return NULL_TREE;
7474 arg10 = arg1;
7475 arg11 = build_one_cst (type);
7476 }
7477 same = NULL_TREE;
7478
7479 /* Prefer factoring a common non-constant. */
7480 if (operand_equal_p (arg00, arg10, 0))
7481 same = arg00, alt0 = arg01, alt1 = arg11;
7482 else if (operand_equal_p (arg01, arg11, 0))
7483 same = arg01, alt0 = arg00, alt1 = arg10;
7484 else if (operand_equal_p (arg00, arg11, 0))
7485 same = arg00, alt0 = arg01, alt1 = arg10;
7486 else if (operand_equal_p (arg01, arg10, 0))
7487 same = arg01, alt0 = arg00, alt1 = arg11;
7488
7489 /* No identical multiplicands; see if we can find a common
7490 power-of-two factor in non-power-of-two multiplies. This
7491 can help in multi-dimensional array access. */
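      /* For instance (illustrative), in an address computation such as
         i * 12 + j * 4 the common power-of-two factor 4 is pulled out,
         giving (i * 3 + j) * 4, so the scaling by 4 is applied once.  */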
7492 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7493 {
7494 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7495 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7496 HOST_WIDE_INT tmp;
7497 bool swap = false;
7498 tree maybe_same;
7499
7500 /* Move min of absolute values to int11. */
7501 if (absu_hwi (int01) < absu_hwi (int11))
7502 {
7503 tmp = int01, int01 = int11, int11 = tmp;
7504 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7505 maybe_same = arg01;
7506 swap = true;
7507 }
7508 else
7509 maybe_same = arg11;
7510
7511 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7512 if (factor > 1
7513 && pow2p_hwi (factor)
7514 && (int01 & (factor - 1)) == 0
7515 /* The remainder should not be a constant, otherwise we
7516 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7517 increase the number of multiplications necessary. */
7518 && TREE_CODE (arg10) != INTEGER_CST)
7519 {
7520 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7521 build_int_cst (TREE_TYPE (arg00),
7522 int01 / int11));
7523 alt1 = arg10;
7524 same = maybe_same;
7525 if (swap)
7526 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7527 }
7528 }
7529
7530 if (!same)
7531 return NULL_TREE;
7532
7533 if (! ANY_INTEGRAL_TYPE_P (type)
7534 || TYPE_OVERFLOW_WRAPS (type)
7535 /* We are neither factoring zero nor minus one. */
7536 || TREE_CODE (same) == INTEGER_CST)
7537 return fold_build2_loc (loc, MULT_EXPR, type,
7538 fold_build2_loc (loc, code, type,
7539 fold_convert_loc (loc, type, alt0),
7540 fold_convert_loc (loc, type, alt1)),
7541 fold_convert_loc (loc, type, same));
7542
7543 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7544 same may be minus one and thus the multiplication may overflow. Perform
7545 the sum operation in an unsigned type. */
7546 tree utype = unsigned_type_for (type);
7547 tree tem = fold_build2_loc (loc, code, utype,
7548 fold_convert_loc (loc, utype, alt0),
7549 fold_convert_loc (loc, utype, alt1));
7550 /* If the sum evaluated to a constant that is not -INF, the multiplication
7551 cannot overflow. */
7552 if (TREE_CODE (tem) == INTEGER_CST
7553 && (wi::to_wide (tem)
7554 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7555 return fold_build2_loc (loc, MULT_EXPR, type,
7556 fold_convert (type, tem), same);
7557
7558 /* Do not resort to unsigned multiplication because
7559 we lose the no-overflow property of the expression. */
7560 return NULL_TREE;
7561 }
7562
7563 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7564 specified by EXPR into the buffer PTR of length LEN bytes.
7565 Return the number of bytes placed in the buffer, or zero
7566 upon failure. */
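/* As an illustrative example, on a little-endian target with 32-bit int
   the constant 0x01020304 is written into PTR as the bytes 04 03 02 01,
   whereas a big-endian target stores 01 02 03 04; OFF, when not -1,
   selects the starting byte within that target image.  */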
7567
7568 static int
7569 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7570 {
7571 tree type = TREE_TYPE (expr);
7572 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7573 int byte, offset, word, words;
7574 unsigned char value;
7575
7576 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7577 return 0;
7578 if (off == -1)
7579 off = 0;
7580
7581 if (ptr == NULL)
7582 /* Dry run. */
7583 return MIN (len, total_bytes - off);
7584
7585 words = total_bytes / UNITS_PER_WORD;
7586
7587 for (byte = 0; byte < total_bytes; byte++)
7588 {
7589 int bitpos = byte * BITS_PER_UNIT;
7590 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7591 number of bytes. */
7592 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7593
7594 if (total_bytes > UNITS_PER_WORD)
7595 {
7596 word = byte / UNITS_PER_WORD;
7597 if (WORDS_BIG_ENDIAN)
7598 word = (words - 1) - word;
7599 offset = word * UNITS_PER_WORD;
7600 if (BYTES_BIG_ENDIAN)
7601 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7602 else
7603 offset += byte % UNITS_PER_WORD;
7604 }
7605 else
7606 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7607 if (offset >= off && offset - off < len)
7608 ptr[offset - off] = value;
7609 }
7610 return MIN (len, total_bytes - off);
7611 }
7612
7613
7614 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7615 specified by EXPR into the buffer PTR of length LEN bytes.
7616 Return the number of bytes placed in the buffer, or zero
7617 upon failure. */
7618
7619 static int
7620 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7621 {
7622 tree type = TREE_TYPE (expr);
7623 scalar_mode mode = SCALAR_TYPE_MODE (type);
7624 int total_bytes = GET_MODE_SIZE (mode);
7625 FIXED_VALUE_TYPE value;
7626 tree i_value, i_type;
7627
7628 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7629 return 0;
7630
7631 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7632
7633 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7634 return 0;
7635
7636 value = TREE_FIXED_CST (expr);
7637 i_value = double_int_to_tree (i_type, value.data);
7638
7639 return native_encode_int (i_value, ptr, len, off);
7640 }
7641
7642
7643 /* Subroutine of native_encode_expr. Encode the REAL_CST
7644 specified by EXPR into the buffer PTR of length LEN bytes.
7645 Return the number of bytes placed in the buffer, or zero
7646 upon failure. */
7647
7648 static int
7649 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7650 {
7651 tree type = TREE_TYPE (expr);
7652 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7653 int byte, offset, word, words, bitpos;
7654 unsigned char value;
7655
7656 /* There are always 32 bits in each long, no matter the size of
7657 the host's long. We handle floating point representations with
7658 up to 192 bits. */
7659 long tmp[6];
7660
7661 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7662 return 0;
7663 if (off == -1)
7664 off = 0;
7665
7666 if (ptr == NULL)
7667 /* Dry run. */
7668 return MIN (len, total_bytes - off);
7669
7670 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7671
7672 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7673
7674 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7675 bitpos += BITS_PER_UNIT)
7676 {
7677 byte = (bitpos / BITS_PER_UNIT) & 3;
7678 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7679
7680 if (UNITS_PER_WORD < 4)
7681 {
7682 word = byte / UNITS_PER_WORD;
7683 if (WORDS_BIG_ENDIAN)
7684 word = (words - 1) - word;
7685 offset = word * UNITS_PER_WORD;
7686 if (BYTES_BIG_ENDIAN)
7687 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7688 else
7689 offset += byte % UNITS_PER_WORD;
7690 }
7691 else
7692 {
7693 offset = byte;
7694 if (BYTES_BIG_ENDIAN)
7695 {
7696 /* Reverse bytes within each long, or within the entire float
7697 if it's smaller than a long (for HFmode). */
7698 offset = MIN (3, total_bytes - 1) - offset;
7699 gcc_assert (offset >= 0);
7700 }
7701 }
7702 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7703 if (offset >= off
7704 && offset - off < len)
7705 ptr[offset - off] = value;
7706 }
7707 return MIN (len, total_bytes - off);
7708 }
7709
7710 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7711 specified by EXPR into the buffer PTR of length LEN bytes.
7712 Return the number of bytes placed in the buffer, or zero
7713 upon failure. */
7714
7715 static int
7716 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7717 {
7718 int rsize, isize;
7719 tree part;
7720
7721 part = TREE_REALPART (expr);
7722 rsize = native_encode_expr (part, ptr, len, off);
7723 if (off == -1 && rsize == 0)
7724 return 0;
7725 part = TREE_IMAGPART (expr);
7726 if (off != -1)
7727 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7728 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7729 len - rsize, off);
7730 if (off == -1 && isize != rsize)
7731 return 0;
7732 return rsize + isize;
7733 }
7734
7735 /* Like native_encode_vector, but only encode the first COUNT elements.
7736 The other arguments are as for native_encode_vector. */
7737
7738 static int
7739 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7740 int off, unsigned HOST_WIDE_INT count)
7741 {
7742 tree itype = TREE_TYPE (TREE_TYPE (expr));
7743 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7744 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7745 {
7746 /* This is the only case in which elements can be smaller than a byte.
7747 Element 0 is always in the lsb of the containing byte. */
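      /* For instance (illustrative), with 1-bit boolean elements the
         values {1, 0, 1, 1} occupy bits 0..3 of the first byte, which
         therefore holds the value 0x0d.  */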
7748 unsigned int elt_bits = TYPE_PRECISION (itype);
7749 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7750 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7751 return 0;
7752
7753 if (off == -1)
7754 off = 0;
7755
7756 /* Zero the buffer and then set bits later where necessary. */
7757 int extract_bytes = MIN (len, total_bytes - off);
7758 if (ptr)
7759 memset (ptr, 0, extract_bytes);
7760
7761 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7762 unsigned int first_elt = off * elts_per_byte;
7763 unsigned int extract_elts = extract_bytes * elts_per_byte;
7764 for (unsigned int i = 0; i < extract_elts; ++i)
7765 {
7766 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7767 if (TREE_CODE (elt) != INTEGER_CST)
7768 return 0;
7769
7770 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7771 {
7772 unsigned int bit = i * elt_bits;
7773 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7774 }
7775 }
7776 return extract_bytes;
7777 }
7778
7779 int offset = 0;
7780 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7781 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7782 {
7783 if (off >= size)
7784 {
7785 off -= size;
7786 continue;
7787 }
7788 tree elem = VECTOR_CST_ELT (expr, i);
7789 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7790 len - offset, off);
7791 if ((off == -1 && res != size) || res == 0)
7792 return 0;
7793 offset += res;
7794 if (offset >= len)
7795 return (off == -1 && i < count - 1) ? 0 : offset;
7796 if (off != -1)
7797 off = 0;
7798 }
7799 return offset;
7800 }
7801
7802 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7803 specified by EXPR into the buffer PTR of length LEN bytes.
7804 Return the number of bytes placed in the buffer, or zero
7805 upon failure. */
7806
7807 static int
7808 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7809 {
7810 unsigned HOST_WIDE_INT count;
7811 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7812 return 0;
7813 return native_encode_vector_part (expr, ptr, len, off, count);
7814 }
7815
7816
7817 /* Subroutine of native_encode_expr. Encode the STRING_CST
7818 specified by EXPR into the buffer PTR of length LEN bytes.
7819 Return the number of bytes placed in the buffer, or zero
7820 upon failure. */
7821
7822 static int
7823 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7824 {
7825 tree type = TREE_TYPE (expr);
7826
7827 /* Wide-char strings are encoded in target byte order, so encoding
7828 them natively is trivial. */
7829 if (BITS_PER_UNIT != CHAR_BIT
7830 || TREE_CODE (type) != ARRAY_TYPE
7831 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7832 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7833 return 0;
7834
7835 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7836 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7837 return 0;
7838 if (off == -1)
7839 off = 0;
7840 len = MIN (total_bytes - off, len);
7841 if (ptr == NULL)
7842 /* Dry run. */;
7843 else
7844 {
7845 int written = 0;
7846 if (off < TREE_STRING_LENGTH (expr))
7847 {
7848 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7849 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7850 }
7851 memset (ptr + written, 0, len - written);
7852 }
7853 return len;
7854 }
7855
7856
7857 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7858 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7859 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7860 anything, just do a dry run. If OFF is not -1 then start
7861 the encoding at byte offset OFF and encode at most LEN bytes.
7862 Return the number of bytes placed in the buffer, or zero upon failure. */
7863
7864 int
7865 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7866 {
7867 /* We don't support starting at a negative offset, and -1 is special. */
7868 if (off < -1)
7869 return 0;
7870
7871 switch (TREE_CODE (expr))
7872 {
7873 case INTEGER_CST:
7874 return native_encode_int (expr, ptr, len, off);
7875
7876 case REAL_CST:
7877 return native_encode_real (expr, ptr, len, off);
7878
7879 case FIXED_CST:
7880 return native_encode_fixed (expr, ptr, len, off);
7881
7882 case COMPLEX_CST:
7883 return native_encode_complex (expr, ptr, len, off);
7884
7885 case VECTOR_CST:
7886 return native_encode_vector (expr, ptr, len, off);
7887
7888 case STRING_CST:
7889 return native_encode_string (expr, ptr, len, off);
7890
7891 default:
7892 return 0;
7893 }
7894 }
7895
7896 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
7897 NON_LVALUE_EXPRs and nops. */
7898
7899 int
7900 native_encode_initializer (tree init, unsigned char *ptr, int len,
7901 int off)
7902 {
7903 /* We don't support starting at a negative offset, and -1 is special. */
7904 if (off < -1 || init == NULL_TREE)
7905 return 0;
7906
7907 STRIP_NOPS (init);
7908 switch (TREE_CODE (init))
7909 {
7910 case VIEW_CONVERT_EXPR:
7911 case NON_LVALUE_EXPR:
7912 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off);
7913 default:
7914 return native_encode_expr (init, ptr, len, off);
7915 case CONSTRUCTOR:
7916 tree type = TREE_TYPE (init);
7917 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
7918 if (total_bytes < 0)
7919 return 0;
7920 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7921 return 0;
7922 int o = off == -1 ? 0 : off;
7923 if (TREE_CODE (type) == ARRAY_TYPE)
7924 {
7925 HOST_WIDE_INT min_index;
7926 unsigned HOST_WIDE_INT cnt;
7927 HOST_WIDE_INT curpos = 0, fieldsize;
7928 constructor_elt *ce;
7929
7930 if (TYPE_DOMAIN (type) == NULL_TREE
7931 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
7932 return 0;
7933
7934 fieldsize = int_size_in_bytes (TREE_TYPE (type));
7935 if (fieldsize <= 0)
7936 return 0;
7937
7938 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
7939 if (ptr != NULL)
7940 memset (ptr, '\0', MIN (total_bytes - off, len));
7941
7942 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
7943 {
7944 tree val = ce->value;
7945 tree index = ce->index;
7946 HOST_WIDE_INT pos = curpos, count = 0;
7947 bool full = false;
7948 if (index && TREE_CODE (index) == RANGE_EXPR)
7949 {
7950 if (!tree_fits_shwi_p (TREE_OPERAND (index, 0))
7951 || !tree_fits_shwi_p (TREE_OPERAND (index, 1)))
7952 return 0;
7953 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
7954 * fieldsize;
7955 count = (tree_to_shwi (TREE_OPERAND (index, 1))
7956 - tree_to_shwi (TREE_OPERAND (index, 0)));
7957 }
7958 else if (index)
7959 {
7960 if (!tree_fits_shwi_p (index))
7961 return 0;
7962 pos = (tree_to_shwi (index) - min_index) * fieldsize;
7963 }
7964
7965 curpos = pos;
7966 if (val)
7967 do
7968 {
7969 if (off == -1
7970 || (curpos >= off
7971 && (curpos + fieldsize
7972 <= (HOST_WIDE_INT) off + len)))
7973 {
7974 if (full)
7975 {
7976 if (ptr)
7977 memcpy (ptr + (curpos - o), ptr + (pos - o),
7978 fieldsize);
7979 }
7980 else if (!native_encode_initializer (val,
7981 ptr
7982 ? ptr + curpos - o
7983 : NULL,
7984 fieldsize,
7985 off == -1 ? -1
7986 : 0))
7987 return 0;
7988 else
7989 {
7990 full = true;
7991 pos = curpos;
7992 }
7993 }
7994 else if (curpos + fieldsize > off
7995 && curpos < (HOST_WIDE_INT) off + len)
7996 {
7997 /* Partial overlap. */
7998 unsigned char *p = NULL;
7999 int no = 0;
8000 int l;
8001 if (curpos >= off)
8002 {
8003 if (ptr)
8004 p = ptr + curpos - off;
8005 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8006 fieldsize);
8007 }
8008 else
8009 {
8010 p = ptr;
8011 no = off - curpos;
8012 l = len;
8013 }
8014 if (!native_encode_initializer (val, p, l, no))
8015 return 0;
8016 }
8017 curpos += fieldsize;
8018 }
8019 while (count-- != 0);
8020 }
8021 return MIN (total_bytes - off, len);
8022 }
8023 else if (TREE_CODE (type) == RECORD_TYPE
8024 || TREE_CODE (type) == UNION_TYPE)
8025 {
8026 unsigned HOST_WIDE_INT cnt;
8027 constructor_elt *ce;
8028
8029 if (ptr != NULL)
8030 memset (ptr, '\0', MIN (total_bytes - off, len));
8031 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
8032 {
8033 tree field = ce->index;
8034 tree val = ce->value;
8035 HOST_WIDE_INT pos, fieldsize;
8036
8037 if (field == NULL_TREE)
8038 return 0;
8039
8040 pos = int_byte_position (field);
8041 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8042 continue;
8043
8044 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8045 && TYPE_DOMAIN (TREE_TYPE (field))
8046 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8047 return 0;
8048 if (DECL_SIZE_UNIT (field) == NULL_TREE
8049 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8050 return 0;
8051 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8052 if (fieldsize == 0)
8053 continue;
8054
8055 if (off != -1 && pos + fieldsize <= off)
8056 continue;
8057
8058 if (DECL_BIT_FIELD (field))
8059 return 0;
8060
8061 if (val == NULL_TREE)
8062 continue;
8063
8064 if (off == -1
8065 || (pos >= off
8066 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8067 {
8068 if (!native_encode_initializer (val, ptr ? ptr + pos - o
8069 : NULL,
8070 fieldsize,
8071 off == -1 ? -1 : 0))
8072 return 0;
8073 }
8074 else
8075 {
8076 /* Partial overlap. */
8077 unsigned char *p = NULL;
8078 int no = 0;
8079 int l;
8080 if (pos >= off)
8081 {
8082 if (ptr)
8083 p = ptr + pos - off;
8084 l = MIN ((HOST_WIDE_INT) off + len - pos,
8085 fieldsize);
8086 }
8087 else
8088 {
8089 p = ptr;
8090 no = off - pos;
8091 l = len;
8092 }
8093 if (!native_encode_initializer (val, p, l, no))
8094 return 0;
8095 }
8096 }
8097 return MIN (total_bytes - off, len);
8098 }
8099 return 0;
8100 }
8101 }
8102
8103
8104 /* Subroutine of native_interpret_expr. Interpret the contents of
8105 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8106 If the buffer cannot be interpreted, return NULL_TREE. */
8107
8108 static tree
8109 native_interpret_int (tree type, const unsigned char *ptr, int len)
8110 {
8111 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8112
8113 if (total_bytes > len
8114 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8115 return NULL_TREE;
8116
8117 wide_int result = wi::from_buffer (ptr, total_bytes);
8118
8119 return wide_int_to_tree (type, result);
8120 }
8121
8122
8123 /* Subroutine of native_interpret_expr. Interpret the contents of
8124 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8125 If the buffer cannot be interpreted, return NULL_TREE. */
8126
8127 static tree
8128 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8129 {
8130 scalar_mode mode = SCALAR_TYPE_MODE (type);
8131 int total_bytes = GET_MODE_SIZE (mode);
8132 double_int result;
8133 FIXED_VALUE_TYPE fixed_value;
8134
8135 if (total_bytes > len
8136 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8137 return NULL_TREE;
8138
8139 result = double_int::from_buffer (ptr, total_bytes);
8140 fixed_value = fixed_from_double_int (result, mode);
8141
8142 return build_fixed (type, fixed_value);
8143 }
8144
8145
8146 /* Subroutine of native_interpret_expr. Interpret the contents of
8147 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8148 If the buffer cannot be interpreted, return NULL_TREE. */
8149
8150 static tree
8151 native_interpret_real (tree type, const unsigned char *ptr, int len)
8152 {
8153 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8154 int total_bytes = GET_MODE_SIZE (mode);
8155 unsigned char value;
8156 /* There are always 32 bits in each long, no matter the size of
8157 the host's long. We handle floating point representations with
8158 up to 192 bits. */
8159 REAL_VALUE_TYPE r;
8160 long tmp[6];
8161
8162 if (total_bytes > len || total_bytes > 24)
8163 return NULL_TREE;
8164 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8165
8166 memset (tmp, 0, sizeof (tmp));
8167 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8168 bitpos += BITS_PER_UNIT)
8169 {
8170 /* Both OFFSET and BYTE index within a long;
8171 bitpos indexes the whole float. */
8172 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8173 if (UNITS_PER_WORD < 4)
8174 {
8175 int word = byte / UNITS_PER_WORD;
8176 if (WORDS_BIG_ENDIAN)
8177 word = (words - 1) - word;
8178 offset = word * UNITS_PER_WORD;
8179 if (BYTES_BIG_ENDIAN)
8180 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8181 else
8182 offset += byte % UNITS_PER_WORD;
8183 }
8184 else
8185 {
8186 offset = byte;
8187 if (BYTES_BIG_ENDIAN)
8188 {
8189 /* Reverse bytes within each long, or within the entire float
8190 if it's smaller than a long (for HFmode). */
8191 offset = MIN (3, total_bytes - 1) - offset;
8192 gcc_assert (offset >= 0);
8193 }
8194 }
8195 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8196
8197 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8198 }
8199
8200 real_from_target (&r, tmp, mode);
8201 return build_real (type, r);
8202 }
8203
8204
8205 /* Subroutine of native_interpret_expr. Interpret the contents of
8206 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8207 If the buffer cannot be interpreted, return NULL_TREE. */
8208
8209 static tree
8210 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8211 {
8212 tree etype, rpart, ipart;
8213 int size;
8214
8215 etype = TREE_TYPE (type);
8216 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8217 if (size * 2 > len)
8218 return NULL_TREE;
8219 rpart = native_interpret_expr (etype, ptr, size);
8220 if (!rpart)
8221 return NULL_TREE;
8222 ipart = native_interpret_expr (etype, ptr+size, size);
8223 if (!ipart)
8224 return NULL_TREE;
8225 return build_complex (type, rpart, ipart);
8226 }
8227
8228 /* Read a vector of type TYPE from the target memory image given by BYTES,
8229 which contains LEN bytes. The vector is known to be encodable using
8230 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8231
8232 Return the vector on success, otherwise return null. */
8233
8234 static tree
8235 native_interpret_vector_part (tree type, const unsigned char *bytes,
8236 unsigned int len, unsigned int npatterns,
8237 unsigned int nelts_per_pattern)
8238 {
8239 tree elt_type = TREE_TYPE (type);
8240 if (VECTOR_BOOLEAN_TYPE_P (type)
8241 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8242 {
8243 /* This is the only case in which elements can be smaller than a byte.
8244 Element 0 is always in the lsb of the containing byte. */
8245 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8246 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8247 return NULL_TREE;
8248
8249 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8250 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8251 {
8252 unsigned int bit_index = i * elt_bits;
8253 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8254 unsigned int lsb = bit_index % BITS_PER_UNIT;
8255 builder.quick_push (bytes[byte_index] & (1 << lsb)
8256 ? build_all_ones_cst (elt_type)
8257 : build_zero_cst (elt_type));
8258 }
8259 return builder.build ();
8260 }
8261
8262 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8263 if (elt_bytes * npatterns * nelts_per_pattern > len)
8264 return NULL_TREE;
8265
8266 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8267 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8268 {
8269 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8270 if (!elt)
8271 return NULL_TREE;
8272 builder.quick_push (elt);
8273 bytes += elt_bytes;
8274 }
8275 return builder.build ();
8276 }
8277
8278 /* Subroutine of native_interpret_expr. Interpret the contents of
8279 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8280 If the buffer cannot be interpreted, return NULL_TREE. */
8281
8282 static tree
8283 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8284 {
8285 tree etype;
8286 unsigned int size;
8287 unsigned HOST_WIDE_INT count;
8288
8289 etype = TREE_TYPE (type);
8290 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8291 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8292 || size * count > len)
8293 return NULL_TREE;
8294
8295 return native_interpret_vector_part (type, ptr, len, count, 1);
8296 }
8297
8298
8299 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8300 the buffer PTR of length LEN as a constant of type TYPE. For
8301 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8302 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8303 return NULL_TREE. */
8304
8305 tree
8306 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8307 {
8308 switch (TREE_CODE (type))
8309 {
8310 case INTEGER_TYPE:
8311 case ENUMERAL_TYPE:
8312 case BOOLEAN_TYPE:
8313 case POINTER_TYPE:
8314 case REFERENCE_TYPE:
8315 return native_interpret_int (type, ptr, len);
8316
8317 case REAL_TYPE:
8318 return native_interpret_real (type, ptr, len);
8319
8320 case FIXED_POINT_TYPE:
8321 return native_interpret_fixed (type, ptr, len);
8322
8323 case COMPLEX_TYPE:
8324 return native_interpret_complex (type, ptr, len);
8325
8326 case VECTOR_TYPE:
8327 return native_interpret_vector (type, ptr, len);
8328
8329 default:
8330 return NULL_TREE;
8331 }
8332 }
8333
8334 /* Returns true if we can interpret the contents of a native encoding
8335 as TYPE. */
8336
8337 bool
8338 can_native_interpret_type_p (tree type)
8339 {
8340 switch (TREE_CODE (type))
8341 {
8342 case INTEGER_TYPE:
8343 case ENUMERAL_TYPE:
8344 case BOOLEAN_TYPE:
8345 case POINTER_TYPE:
8346 case REFERENCE_TYPE:
8347 case FIXED_POINT_TYPE:
8348 case REAL_TYPE:
8349 case COMPLEX_TYPE:
8350 case VECTOR_TYPE:
8351 return true;
8352 default:
8353 return false;
8354 }
8355 }
8356
8357 /* Routines for manipulation of native_encode_expr encoded data if the encoded
8358 or extracted constant positions and/or sizes aren't byte aligned. */
8359
8360 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8361 bits between adjacent elements. AMNT should be within
8362 [0, BITS_PER_UNIT).
8363 Example, AMNT = 2:
8364 00011111|11100000 << 2 = 01111111|10000000
8365 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
8366
8367 void
8368 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8369 unsigned int amnt)
8370 {
8371 if (amnt == 0)
8372 return;
8373
8374 unsigned char carry_over = 0U;
8375 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8376 unsigned char clear_mask = (~0U) << amnt;
8377
8378 for (unsigned int i = 0; i < sz; i++)
8379 {
8380 unsigned prev_carry_over = carry_over;
8381 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
8382
8383 ptr[i] <<= amnt;
8384 if (i != 0)
8385 {
8386 ptr[i] &= clear_mask;
8387 ptr[i] |= prev_carry_over;
8388 }
8389 }
8390 }
8391
8392 /* Like shift_bytes_in_array_left but for big-endian.
8393 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8394 bits between adjacent elements. AMNT should be within
8395 [0, BITS_PER_UNIT).
8396 Example, AMNT = 2:
8397 00011111|11100000 >> 2 = 00000111|11111000
8398 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
8399
8400 void
8401 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
8402 unsigned int amnt)
8403 {
8404 if (amnt == 0)
8405 return;
8406
8407 unsigned char carry_over = 0U;
8408 unsigned char carry_mask = ~(~0U << amnt);
8409
8410 for (unsigned int i = 0; i < sz; i++)
8411 {
8412 unsigned prev_carry_over = carry_over;
8413 carry_over = ptr[i] & carry_mask;
8414
8415 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
8416 ptr[i] >>= amnt;
8417 ptr[i] |= prev_carry_over;
8418 }
8419 }
8420
8421 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
8422 directly on the VECTOR_CST encoding, in a way that works for variable-
8423 length vectors. Return the resulting VECTOR_CST on success or null
8424 on failure. */
8425
8426 static tree
8427 fold_view_convert_vector_encoding (tree type, tree expr)
8428 {
8429 tree expr_type = TREE_TYPE (expr);
8430 poly_uint64 type_bits, expr_bits;
8431 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
8432 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
8433 return NULL_TREE;
8434
8435 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
8436 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
8437 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
8438 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
8439
8440 /* We can only preserve the semantics of a stepped pattern if the new
8441 vector element is an integer of the same size. */
8442 if (VECTOR_CST_STEPPED_P (expr)
8443 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
8444 return NULL_TREE;
8445
8446 /* The number of bits needed to encode one element from every pattern
8447 of the original vector. */
8448 unsigned int expr_sequence_bits
8449 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
8450
8451 /* The number of bits needed to encode one element from every pattern
8452 of the result. */
8453 unsigned int type_sequence_bits
8454 = least_common_multiple (expr_sequence_bits, type_elt_bits);
8455
8456 /* Don't try to read more bytes than are available, which can happen
8457 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
8458 The general VIEW_CONVERT handling can cope with that case, so there's
8459 no point complicating things here. */
8460 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
8461 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
8462 BITS_PER_UNIT);
8463 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
8464 if (known_gt (buffer_bits, expr_bits))
8465 return NULL_TREE;
8466
8467 /* Get enough bytes of EXPR to form the new encoding. */
8468 auto_vec<unsigned char, 128> buffer (buffer_bytes);
8469 buffer.quick_grow (buffer_bytes);
8470 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
8471 buffer_bits / expr_elt_bits)
8472 != (int) buffer_bytes)
8473 return NULL_TREE;
8474
8475 /* Reencode the bytes as TYPE. */
8476 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
8477 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
8478 type_npatterns, nelts_per_pattern);
8479 }
8480
8481 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8482 TYPE at compile-time. If we're unable to perform the conversion
8483 return NULL_TREE. */
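/* For example (illustrative), VIEW_CONVERT_EXPR<int>(1.0f) folds to the
   integer constant 0x3f800000, the IEEE single-precision bit pattern of
   1.0, on a target whose int and float are both 32 bits wide.  */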
8484
8485 static tree
8486 fold_view_convert_expr (tree type, tree expr)
8487 {
8488 /* We support up to 512-bit values (for V8DFmode). */
8489 unsigned char buffer[64];
8490 int len;
8491
8492 /* Check that the host and target are sane. */
8493 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8494 return NULL_TREE;
8495
8496 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
8497 if (tree res = fold_view_convert_vector_encoding (type, expr))
8498 return res;
8499
8500 len = native_encode_expr (expr, buffer, sizeof (buffer));
8501 if (len == 0)
8502 return NULL_TREE;
8503
8504 return native_interpret_expr (type, buffer, len);
8505 }
8506
8507 /* Build an expression for the address of T. Folds away INDIRECT_REF
8508 to avoid confusing the gimplify process. */
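/* For instance (illustrative), &*p, or the address of a MEM_REF of p
   with a zero offset, simplifies to just p (converted to PTRTYPE if
   necessary) rather than wrapping the dereference in a new ADDR_EXPR.  */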
8509
8510 tree
8511 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8512 {
8513 /* The size of the object is not relevant when talking about its address. */
8514 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8515 t = TREE_OPERAND (t, 0);
8516
8517 if (TREE_CODE (t) == INDIRECT_REF)
8518 {
8519 t = TREE_OPERAND (t, 0);
8520
8521 if (TREE_TYPE (t) != ptrtype)
8522 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
8523 }
8524 else if (TREE_CODE (t) == MEM_REF
8525 && integer_zerop (TREE_OPERAND (t, 1)))
8526 {
8527 t = TREE_OPERAND (t, 0);
8528
8529 if (TREE_TYPE (t) != ptrtype)
8530 t = fold_convert_loc (loc, ptrtype, t);
8531 }
8532 else if (TREE_CODE (t) == MEM_REF
8533 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
8534 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
8535 TREE_OPERAND (t, 0),
8536 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
8537 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8538 {
8539 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8540
8541 if (TREE_TYPE (t) != ptrtype)
8542 t = fold_convert_loc (loc, ptrtype, t);
8543 }
8544 else
8545 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
8546
8547 return t;
8548 }
8549
8550 /* Build an expression for the address of T. */
8551
8552 tree
8553 build_fold_addr_expr_loc (location_t loc, tree t)
8554 {
8555 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8556
8557 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8558 }
8559
8560 /* Fold a unary expression of code CODE and type TYPE with operand
8561 OP0. Return the folded expression if folding is successful.
8562 Otherwise, return NULL_TREE. */
8563
8564 tree
8565 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8566 {
8567 tree tem;
8568 tree arg0;
8569 enum tree_code_class kind = TREE_CODE_CLASS (code);
8570
8571 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8572 && TREE_CODE_LENGTH (code) == 1);
8573
8574 arg0 = op0;
8575 if (arg0)
8576 {
8577 if (CONVERT_EXPR_CODE_P (code)
8578 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
8579 {
8580 /* Don't use STRIP_NOPS, because signedness of argument type
8581 matters. */
8582 STRIP_SIGN_NOPS (arg0);
8583 }
8584 else
8585 {
8586 /* Strip any conversions that don't change the mode. This
8587 is safe for every expression, except for a comparison
8588 expression because its signedness is derived from its
8589 operands.
8590
8591 Note that this is done as an internal manipulation within
8592 the constant folder, in order to find the simplest
8593 representation of the arguments so that their form can be
8594 studied. In any case, the appropriate type conversions
8595 should be put back in the tree that will get out of the
8596 constant folder. */
8597 STRIP_NOPS (arg0);
8598 }
8599
8600 if (CONSTANT_CLASS_P (arg0))
8601 {
8602 tree tem = const_unop (code, type, arg0);
8603 if (tem)
8604 {
8605 if (TREE_TYPE (tem) != type)
8606 tem = fold_convert_loc (loc, type, tem);
8607 return tem;
8608 }
8609 }
8610 }
8611
8612 tem = generic_simplify (loc, code, type, op0);
8613 if (tem)
8614 return tem;
8615
8616 if (TREE_CODE_CLASS (code) == tcc_unary)
8617 {
8618 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8619 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8620 fold_build1_loc (loc, code, type,
8621 fold_convert_loc (loc, TREE_TYPE (op0),
8622 TREE_OPERAND (arg0, 1))));
8623 else if (TREE_CODE (arg0) == COND_EXPR)
8624 {
8625 tree arg01 = TREE_OPERAND (arg0, 1);
8626 tree arg02 = TREE_OPERAND (arg0, 2);
8627 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8628 arg01 = fold_build1_loc (loc, code, type,
8629 fold_convert_loc (loc,
8630 TREE_TYPE (op0), arg01));
8631 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8632 arg02 = fold_build1_loc (loc, code, type,
8633 fold_convert_loc (loc,
8634 TREE_TYPE (op0), arg02));
8635 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8636 arg01, arg02);
8637
8638 /* If this was a conversion, and all we did was to move it
8639 inside the COND_EXPR, bring it back out. But leave it if
8640 it is a conversion from integer to integer and the
8641 result precision is no wider than a word since such a
8642 conversion is cheap and may be optimized away by combine,
8643 while it couldn't if it were outside the COND_EXPR. Then return
8644 so we don't get into an infinite recursion loop taking the
8645 conversion out and then back in. */
8646
8647 if ((CONVERT_EXPR_CODE_P (code)
8648 || code == NON_LVALUE_EXPR)
8649 && TREE_CODE (tem) == COND_EXPR
8650 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8651 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8652 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8653 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8654 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8655 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8656 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8657 && (INTEGRAL_TYPE_P
8658 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8659 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8660 || flag_syntax_only))
8661 tem = build1_loc (loc, code, type,
8662 build3 (COND_EXPR,
8663 TREE_TYPE (TREE_OPERAND
8664 (TREE_OPERAND (tem, 1), 0)),
8665 TREE_OPERAND (tem, 0),
8666 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8667 TREE_OPERAND (TREE_OPERAND (tem, 2),
8668 0)));
8669 return tem;
8670 }
8671 }
8672
8673 switch (code)
8674 {
8675 case NON_LVALUE_EXPR:
8676 if (!maybe_lvalue_p (op0))
8677 return fold_convert_loc (loc, type, op0);
8678 return NULL_TREE;
8679
8680 CASE_CONVERT:
8681 case FLOAT_EXPR:
8682 case FIX_TRUNC_EXPR:
8683 if (COMPARISON_CLASS_P (op0))
8684 {
8685 /* If we have (type) (a CMP b) and type is an integral type, return a
8686 new expression involving the new type. Canonicalize
8687 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for a
8688 non-integral type.
8689 Do not fold the result, as that would not simplify further; also,
8690 folding it again would lead to infinite recursion. */
8691 if (TREE_CODE (type) == BOOLEAN_TYPE)
8692 return build2_loc (loc, TREE_CODE (op0), type,
8693 TREE_OPERAND (op0, 0),
8694 TREE_OPERAND (op0, 1));
8695 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
8696 && TREE_CODE (type) != VECTOR_TYPE)
8697 return build3_loc (loc, COND_EXPR, type, op0,
8698 constant_boolean_node (true, type),
8699 constant_boolean_node (false, type));
8700 }
8701
8702 /* Handle (T *)&A.B.C for A being of type T and B and C
8703 living at offset zero. This occurs frequently in
8704 C++ upcasting and then accessing the base. */
8705 if (TREE_CODE (op0) == ADDR_EXPR
8706 && POINTER_TYPE_P (type)
8707 && handled_component_p (TREE_OPERAND (op0, 0)))
8708 {
8709 poly_int64 bitsize, bitpos;
8710 tree offset;
8711 machine_mode mode;
8712 int unsignedp, reversep, volatilep;
8713 tree base
8714 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
8715 &offset, &mode, &unsignedp, &reversep,
8716 &volatilep);
8717 /* If the reference was to a (constant) zero offset, we can use
8718 the address of the base if it has the same base type
8719 as the result type and the pointer type is unqualified. */
8720 if (!offset
8721 && known_eq (bitpos, 0)
8722 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8723 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8724 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8725 return fold_convert_loc (loc, type,
8726 build_fold_addr_expr_loc (loc, base));
8727 }
8728
8729 if (TREE_CODE (op0) == MODIFY_EXPR
8730 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8731 /* Detect assigning a bitfield. */
8732 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8733 && DECL_BIT_FIELD
8734 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8735 {
8736 /* Don't leave an assignment inside a conversion
8737 unless assigning a bitfield. */
8738 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8739 /* First do the assignment, then return converted constant. */
8740 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8741 TREE_NO_WARNING (tem) = 1;
8742 TREE_USED (tem) = 1;
8743 return tem;
8744 }
8745
8746 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8747 constants (if x has signed type, the sign bit cannot be set
8748 in c). This folds extension into the BIT_AND_EXPR.
8749 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8750 very likely don't have maximal range for their precision and this
8751 transformation effectively doesn't preserve non-maximal ranges. */
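/* Illustrative: for unsigned int x, (unsigned long) (x & 0xff) can become
   (unsigned long) x & 0xff, folding the widening into the BIT_AND_EXPR.  */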
8752 if (TREE_CODE (type) == INTEGER_TYPE
8753 && TREE_CODE (op0) == BIT_AND_EXPR
8754 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8755 {
8756 tree and_expr = op0;
8757 tree and0 = TREE_OPERAND (and_expr, 0);
8758 tree and1 = TREE_OPERAND (and_expr, 1);
8759 int change = 0;
8760
8761 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8762 || (TYPE_PRECISION (type)
8763 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8764 change = 1;
8765 else if (TYPE_PRECISION (TREE_TYPE (and1))
8766 <= HOST_BITS_PER_WIDE_INT
8767 && tree_fits_uhwi_p (and1))
8768 {
8769 unsigned HOST_WIDE_INT cst;
8770
8771 cst = tree_to_uhwi (and1);
8772 cst &= HOST_WIDE_INT_M1U
8773 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8774 change = (cst == 0);
8775 if (change
8776 && !flag_syntax_only
8777 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
8778 == ZERO_EXTEND))
8779 {
8780 tree uns = unsigned_type_for (TREE_TYPE (and0));
8781 and0 = fold_convert_loc (loc, uns, and0);
8782 and1 = fold_convert_loc (loc, uns, and1);
8783 }
8784 }
8785 if (change)
8786 {
8787 tem = force_fit_type (type, wi::to_widest (and1), 0,
8788 TREE_OVERFLOW (and1));
8789 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8790 fold_convert_loc (loc, type, and0), tem);
8791 }
8792 }
8793
8794 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8795 cast (T1)X will fold away. We assume that this happens when X itself
8796 is a cast. */
8797 if (POINTER_TYPE_P (type)
8798 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8799 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
8800 {
8801 tree arg00 = TREE_OPERAND (arg0, 0);
8802 tree arg01 = TREE_OPERAND (arg0, 1);
8803
8804 return fold_build_pointer_plus_loc
8805 (loc, fold_convert_loc (loc, type, arg00), arg01);
8806 }
8807
8808 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8809 of the same precision, and X is an integer type not narrower than
8810 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
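/* Illustrative: for unsigned int u, (unsigned) ~(int) u becomes ~u, since
   int and unsigned int have equal precision and no extension is involved.  */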
8811 if (INTEGRAL_TYPE_P (type)
8812 && TREE_CODE (op0) == BIT_NOT_EXPR
8813 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8814 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8815 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8816 {
8817 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8818 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8819 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8820 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8821 fold_convert_loc (loc, type, tem));
8822 }
8823
8824 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8825 type of X and Y (integer types only). */
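/* Illustrative sketch: for int i and j, (short) (i * j) may become
   (short) ((unsigned short) i * (unsigned short) j); the narrower
   multiplication is done in an unsigned type when the target type does
   not wrap, so no new overflow is introduced.  */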
8826 if (INTEGRAL_TYPE_P (type)
8827 && TREE_CODE (op0) == MULT_EXPR
8828 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8829 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8830 {
8831 /* Be careful not to introduce new overflows. */
8832 tree mult_type;
8833 if (TYPE_OVERFLOW_WRAPS (type))
8834 mult_type = type;
8835 else
8836 mult_type = unsigned_type_for (type);
8837
8838 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8839 {
8840 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8841 fold_convert_loc (loc, mult_type,
8842 TREE_OPERAND (op0, 0)),
8843 fold_convert_loc (loc, mult_type,
8844 TREE_OPERAND (op0, 1)));
8845 return fold_convert_loc (loc, type, tem);
8846 }
8847 }
8848
8849 return NULL_TREE;
8850
8851 case VIEW_CONVERT_EXPR:
8852 if (TREE_CODE (op0) == MEM_REF)
8853 {
8854 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8855 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8856 tem = fold_build2_loc (loc, MEM_REF, type,
8857 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8858 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8859 return tem;
8860 }
8861
8862 return NULL_TREE;
8863
8864 case NEGATE_EXPR:
8865 tem = fold_negate_expr (loc, arg0);
8866 if (tem)
8867 return fold_convert_loc (loc, type, tem);
8868 return NULL_TREE;
8869
8870 case ABS_EXPR:
8871 /* Convert fabs((double)float) into (double)fabsf(float). */
8872 if (TREE_CODE (arg0) == NOP_EXPR
8873 && TREE_CODE (type) == REAL_TYPE)
8874 {
8875 tree targ0 = strip_float_extensions (arg0);
8876 if (targ0 != arg0)
8877 return fold_convert_loc (loc, type,
8878 fold_build1_loc (loc, ABS_EXPR,
8879 TREE_TYPE (targ0),
8880 targ0));
8881 }
8882 return NULL_TREE;
8883
8884 case BIT_NOT_EXPR:
8885 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
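/* Illustrative: ~(x ^ 5) becomes x ^ -6, because the BIT_NOT_EXPR of the
   constant operand simplifies (~5 == -6 for int).  */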
8886 if (TREE_CODE (arg0) == BIT_XOR_EXPR
8887 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8888 fold_convert_loc (loc, type,
8889 TREE_OPERAND (arg0, 0)))))
8890 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8891 fold_convert_loc (loc, type,
8892 TREE_OPERAND (arg0, 1)));
8893 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8894 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8895 fold_convert_loc (loc, type,
8896 TREE_OPERAND (arg0, 1)))))
8897 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8898 fold_convert_loc (loc, type,
8899 TREE_OPERAND (arg0, 0)), tem);
8900
8901 return NULL_TREE;
8902
8903 case TRUTH_NOT_EXPR:
8904 /* Note that the operand of this must be an int
8905 and its values must be 0 or 1.
8906 ("true" is a fixed value perhaps depending on the language,
8907 but we don't handle values other than 1 correctly yet.) */
8908 tem = fold_truth_not_expr (loc, arg0);
8909 if (!tem)
8910 return NULL_TREE;
8911 return fold_convert_loc (loc, type, tem);
8912
8913 case INDIRECT_REF:
8914 /* Fold *&X to X if X is an lvalue. */
8915 if (TREE_CODE (op0) == ADDR_EXPR)
8916 {
8917 tree op00 = TREE_OPERAND (op0, 0);
8918 if ((VAR_P (op00)
8919 || TREE_CODE (op00) == PARM_DECL
8920 || TREE_CODE (op00) == RESULT_DECL)
8921 && !TREE_READONLY (op00))
8922 return op00;
8923 }
8924 return NULL_TREE;
8925
8926 default:
8927 return NULL_TREE;
8928 } /* switch (code) */
8929 }
8930
8931
8932 /* If the operation was a conversion do _not_ mark a resulting constant
8933 with TREE_OVERFLOW if the original constant was not. These conversions
8934 have implementation defined behavior and retaining the TREE_OVERFLOW
8935 flag here would confuse later passes such as VRP. */
8936 tree
8937 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8938 tree type, tree op0)
8939 {
8940 tree res = fold_unary_loc (loc, code, type, op0);
8941 if (res
8942 && TREE_CODE (res) == INTEGER_CST
8943 && TREE_CODE (op0) == INTEGER_CST
8944 && CONVERT_EXPR_CODE_P (code))
8945 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8946
8947 return res;
8948 }
8949
8950 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8951 operands OP0 and OP1. LOC is the location of the resulting expression.
8952 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8953 Return the folded expression if folding is successful. Otherwise,
8954 return NULL_TREE. */
8955 static tree
8956 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8957 tree arg0, tree arg1, tree op0, tree op1)
8958 {
8959 tree tem;
8960
8961 /* We only do these simplifications if we are optimizing. */
8962 if (!optimize)
8963 return NULL_TREE;
8964
8965 /* Check for things like (A || B) && (A || C). We can convert this
8966 to A || (B && C). Note that either operator can be any of the four
8967 truth and/or operations and the transformation will still be
8968 valid. Also note that we only care about order for the
8969 ANDIF and ORIF operators. If B contains side effects, this
8970 might change the truth-value of A. */
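/* Illustrative: (a || b) && (a || c) becomes a || (b && c), assuming the
   inner operands carry no side effects; the same rewrite applies to the
   other truth AND/OR combinations.  */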
8971 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8972 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8973 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8974 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8975 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8976 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8977 {
8978 tree a00 = TREE_OPERAND (arg0, 0);
8979 tree a01 = TREE_OPERAND (arg0, 1);
8980 tree a10 = TREE_OPERAND (arg1, 0);
8981 tree a11 = TREE_OPERAND (arg1, 1);
8982 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8983 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8984 && (code == TRUTH_AND_EXPR
8985 || code == TRUTH_OR_EXPR));
8986
8987 if (operand_equal_p (a00, a10, 0))
8988 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8989 fold_build2_loc (loc, code, type, a01, a11));
8990 else if (commutative && operand_equal_p (a00, a11, 0))
8991 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8992 fold_build2_loc (loc, code, type, a01, a10));
8993 else if (commutative && operand_equal_p (a01, a10, 0))
8994 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8995 fold_build2_loc (loc, code, type, a00, a11));
8996
8997 /* This case is tricky because we must either have commutative
8998 operators or else A10 must not have side-effects. */
8999
9000 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9001 && operand_equal_p (a01, a11, 0))
9002 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9003 fold_build2_loc (loc, code, type, a00, a10),
9004 a01);
9005 }
9006
9007 /* See if we can build a range comparison. */
9008 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9009 return tem;
9010
9011 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9012 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9013 {
9014 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9015 if (tem)
9016 return fold_build2_loc (loc, code, type, tem, arg1);
9017 }
9018
9019 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9020 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9021 {
9022 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9023 if (tem)
9024 return fold_build2_loc (loc, code, type, arg0, tem);
9025 }
9026
9027 /* Check for the possibility of merging component references. If our
9028 lhs is another similar operation, try to merge its rhs with our
9029 rhs. Then try to merge our lhs and rhs. */
9030 if (TREE_CODE (arg0) == code
9031 && (tem = fold_truth_andor_1 (loc, code, type,
9032 TREE_OPERAND (arg0, 1), arg1)) != 0)
9033 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9034
9035 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9036 return tem;
9037
9038 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9039 if (param_logical_op_non_short_circuit != -1)
9040 logical_op_non_short_circuit
9041 = param_logical_op_non_short_circuit;
9042 if (logical_op_non_short_circuit
9043 && !flag_sanitize_coverage
9044 && (code == TRUTH_AND_EXPR
9045 || code == TRUTH_ANDIF_EXPR
9046 || code == TRUTH_OR_EXPR
9047 || code == TRUTH_ORIF_EXPR))
9048 {
9049 enum tree_code ncode, icode;
9050
9051 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9052 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9053 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9054
9055 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9056 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9057 We don't want to pack more than two leaves into a non-IF AND/OR
9058 expression.
9059 If the tree code of the left-hand operand isn't an AND/OR-IF code and
9060 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
9061 If the inner right-hand side of left-hand operand has
9062 side-effects, or isn't simple, then we can't add to it,
9063 as otherwise we might destroy if-sequence. */
9064 if (TREE_CODE (arg0) == icode
9065 && simple_operand_p_2 (arg1)
9066 /* Needed for sequence points to handle trappings, and
9067 side-effects. */
9068 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9069 {
9070 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9071 arg1);
9072 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9073 tem);
9074 }
9075 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9076 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
9077 else if (TREE_CODE (arg1) == icode
9078 && simple_operand_p_2 (arg0)
9079 /* Needed for sequence points to handle trappings, and
9080 side-effects. */
9081 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9082 {
9083 tem = fold_build2_loc (loc, ncode, type,
9084 arg0, TREE_OPERAND (arg1, 0));
9085 return fold_build2_loc (loc, icode, type, tem,
9086 TREE_OPERAND (arg1, 1));
9087 }
9088 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9089 into (A OR B).
9090 For sequence point consistency, we need to check for trapping,
9091 and side-effects. */
9092 else if (code == icode && simple_operand_p_2 (arg0)
9093 && simple_operand_p_2 (arg1))
9094 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9095 }
9096
9097 return NULL_TREE;
9098 }
9099
9100 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9101 by changing CODE to reduce the magnitude of constants involved in
9102 ARG0 of the comparison.
9103 Returns a canonicalized comparison tree if a simplification was
9104 possible, otherwise returns NULL_TREE.
9105 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9106 valid if signed overflow is undefined. */
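/* Illustrative example (valid only when signed overflow is undefined):
   x - 5 < y is canonicalized to x - 4 <= y, shrinking the constant by one
   and relaxing the comparison accordingly.  */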
9107
9108 static tree
9109 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9110 tree arg0, tree arg1,
9111 bool *strict_overflow_p)
9112 {
9113 enum tree_code code0 = TREE_CODE (arg0);
9114 tree t, cst0 = NULL_TREE;
9115 int sgn0;
9116
9117 /* Match A +- CST code arg1. We can change this only if overflow
9118 is undefined. */
9119 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9120 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9121 /* In principle pointers also have undefined overflow behavior,
9122 but that causes problems elsewhere. */
9123 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9124 && (code0 == MINUS_EXPR
9125 || code0 == PLUS_EXPR)
9126 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9127 return NULL_TREE;
9128
9129 /* Identify the constant in arg0 and its sign. */
9130 cst0 = TREE_OPERAND (arg0, 1);
9131 sgn0 = tree_int_cst_sgn (cst0);
9132
9133 /* Overflowed constants and zero will cause problems. */
9134 if (integer_zerop (cst0)
9135 || TREE_OVERFLOW (cst0))
9136 return NULL_TREE;
9137
9138 /* See if we can reduce the magnitude of the constant in
9139 arg0 by changing the comparison code. */
9140 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9141 if (code == LT_EXPR
9142 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9143 code = LE_EXPR;
9144 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9145 else if (code == GT_EXPR
9146 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9147 code = GE_EXPR;
9148 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9149 else if (code == LE_EXPR
9150 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9151 code = LT_EXPR;
9152 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9153 else if (code == GE_EXPR
9154 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9155 code = GT_EXPR;
9156 else
9157 return NULL_TREE;
9158 *strict_overflow_p = true;
9159
9160 /* Now build the constant reduced in magnitude. But not if that
9161 would produce one outside of its type's range. */
9162 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9163 && ((sgn0 == 1
9164 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9165 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9166 || (sgn0 == -1
9167 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9168 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9169 return NULL_TREE;
9170
9171 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9172 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9173 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9174 t = fold_convert (TREE_TYPE (arg1), t);
9175
9176 return fold_build2_loc (loc, code, type, t, arg1);
9177 }
9178
9179 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9180 overflow further. Try to decrease the magnitude of constants involved
9181 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9182 and put sole constants at the second argument position.
9183 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9184
9185 static tree
9186 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9187 tree arg0, tree arg1)
9188 {
9189 tree t;
9190 bool strict_overflow_p;
9191 const char * const warnmsg = G_("assuming signed overflow does not occur "
9192 "when reducing constant in comparison");
9193
9194 /* Try canonicalization by simplifying arg0. */
9195 strict_overflow_p = false;
9196 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9197 &strict_overflow_p);
9198 if (t)
9199 {
9200 if (strict_overflow_p)
9201 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9202 return t;
9203 }
9204
9205 /* Try canonicalization by simplifying arg1 using the swapped
9206 comparison. */
9207 code = swap_tree_comparison (code);
9208 strict_overflow_p = false;
9209 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9210 &strict_overflow_p);
9211 if (t && strict_overflow_p)
9212 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9213 return t;
9214 }
9215
9216 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9217 space. This is used to avoid issuing overflow warnings for
9218 expressions like &p->x which cannot wrap. */
9219
9220 static bool
9221 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9222 {
9223 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9224 return true;
9225
9226 if (maybe_lt (bitpos, 0))
9227 return true;
9228
9229 poly_wide_int wi_offset;
9230 int precision = TYPE_PRECISION (TREE_TYPE (base));
9231 if (offset == NULL_TREE)
9232 wi_offset = wi::zero (precision);
9233 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9234 return true;
9235 else
9236 wi_offset = wi::to_poly_wide (offset);
9237
9238 wi::overflow_type overflow;
9239 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9240 precision);
9241 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9242 if (overflow)
9243 return true;
9244
9245 poly_uint64 total_hwi, size;
9246 if (!total.to_uhwi (&total_hwi)
9247 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9248 &size)
9249 || known_eq (size, 0U))
9250 return true;
9251
9252 if (known_le (total_hwi, size))
9253 return false;
9254
9255 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9256 array. */
9257 if (TREE_CODE (base) == ADDR_EXPR
9258 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9259 &size)
9260 && maybe_ne (size, 0U)
9261 && known_le (total_hwi, size))
9262 return false;
9263
9264 return true;
9265 }
9266
9267 /* Return a positive integer when the symbol DECL is known to have
9268 a nonzero address, zero when it's known not to (e.g., it's a weak
9269 symbol), and a negative integer when the symbol is not yet in the
9270 symbol table and so whether or not its address is zero is unknown.
9271 For function-local objects, always return a positive integer. */
9272 static int
9273 maybe_nonzero_address (tree decl)
9274 {
9275 if (DECL_P (decl) && decl_in_symtab_p (decl))
9276 if (struct symtab_node *symbol = symtab_node::get_create (decl))
9277 return symbol->nonzero_address ();
9278
9279 /* Function local objects are never NULL. */
9280 if (DECL_P (decl)
9281 && (DECL_CONTEXT (decl)
9282 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9283 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9284 return 1;
9285
9286 return -1;
9287 }
9288
9289 /* Subroutine of fold_binary. This routine performs all of the
9290 transformations that are common to the equality/inequality
9291 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9292 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9293 fold_binary should go through fold_binary rather than calling this
routine directly. Fold a comparison with
9294 tree code CODE and type TYPE with operands OP0 and OP1. Return
9295 the folded comparison or NULL_TREE. */
9296
9297 static tree
9298 fold_comparison (location_t loc, enum tree_code code, tree type,
9299 tree op0, tree op1)
9300 {
9301 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9302 tree arg0, arg1, tem;
9303
9304 arg0 = op0;
9305 arg1 = op1;
9306
9307 STRIP_SIGN_NOPS (arg0);
9308 STRIP_SIGN_NOPS (arg1);
9309
9310 /* For comparisons of pointers we can decompose it to a compile time
9311 comparison of the base objects and the offsets into the object.
9312 This requires at least one operand being an ADDR_EXPR or a
9313 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9314 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9315 && (TREE_CODE (arg0) == ADDR_EXPR
9316 || TREE_CODE (arg1) == ADDR_EXPR
9317 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9318 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9319 {
9320 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9321 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9322 machine_mode mode;
9323 int volatilep, reversep, unsignedp;
9324 bool indirect_base0 = false, indirect_base1 = false;
9325
9326 /* Get base and offset for the access. Strip ADDR_EXPR for
9327 get_inner_reference, but put it back by stripping INDIRECT_REF
9328 off the base object if possible. indirect_baseN will be true
9329 if baseN is not an address but refers to the object itself. */
9330 base0 = arg0;
9331 if (TREE_CODE (arg0) == ADDR_EXPR)
9332 {
9333 base0
9334 = get_inner_reference (TREE_OPERAND (arg0, 0),
9335 &bitsize, &bitpos0, &offset0, &mode,
9336 &unsignedp, &reversep, &volatilep);
9337 if (TREE_CODE (base0) == INDIRECT_REF)
9338 base0 = TREE_OPERAND (base0, 0);
9339 else
9340 indirect_base0 = true;
9341 }
9342 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9343 {
9344 base0 = TREE_OPERAND (arg0, 0);
9345 STRIP_SIGN_NOPS (base0);
9346 if (TREE_CODE (base0) == ADDR_EXPR)
9347 {
9348 base0
9349 = get_inner_reference (TREE_OPERAND (base0, 0),
9350 &bitsize, &bitpos0, &offset0, &mode,
9351 &unsignedp, &reversep, &volatilep);
9352 if (TREE_CODE (base0) == INDIRECT_REF)
9353 base0 = TREE_OPERAND (base0, 0);
9354 else
9355 indirect_base0 = true;
9356 }
9357 if (offset0 == NULL_TREE || integer_zerop (offset0))
9358 offset0 = TREE_OPERAND (arg0, 1);
9359 else
9360 offset0 = size_binop (PLUS_EXPR, offset0,
9361 TREE_OPERAND (arg0, 1));
9362 if (poly_int_tree_p (offset0))
9363 {
9364 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9365 TYPE_PRECISION (sizetype));
9366 tem <<= LOG2_BITS_PER_UNIT;
9367 tem += bitpos0;
9368 if (tem.to_shwi (&bitpos0))
9369 offset0 = NULL_TREE;
9370 }
9371 }
9372
9373 base1 = arg1;
9374 if (TREE_CODE (arg1) == ADDR_EXPR)
9375 {
9376 base1
9377 = get_inner_reference (TREE_OPERAND (arg1, 0),
9378 &bitsize, &bitpos1, &offset1, &mode,
9379 &unsignedp, &reversep, &volatilep);
9380 if (TREE_CODE (base1) == INDIRECT_REF)
9381 base1 = TREE_OPERAND (base1, 0);
9382 else
9383 indirect_base1 = true;
9384 }
9385 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9386 {
9387 base1 = TREE_OPERAND (arg1, 0);
9388 STRIP_SIGN_NOPS (base1);
9389 if (TREE_CODE (base1) == ADDR_EXPR)
9390 {
9391 base1
9392 = get_inner_reference (TREE_OPERAND (base1, 0),
9393 &bitsize, &bitpos1, &offset1, &mode,
9394 &unsignedp, &reversep, &volatilep);
9395 if (TREE_CODE (base1) == INDIRECT_REF)
9396 base1 = TREE_OPERAND (base1, 0);
9397 else
9398 indirect_base1 = true;
9399 }
9400 if (offset1 == NULL_TREE || integer_zerop (offset1))
9401 offset1 = TREE_OPERAND (arg1, 1);
9402 else
9403 offset1 = size_binop (PLUS_EXPR, offset1,
9404 TREE_OPERAND (arg1, 1));
9405 if (poly_int_tree_p (offset1))
9406 {
9407 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
9408 TYPE_PRECISION (sizetype));
9409 tem <<= LOG2_BITS_PER_UNIT;
9410 tem += bitpos1;
9411 if (tem.to_shwi (&bitpos1))
9412 offset1 = NULL_TREE;
9413 }
9414 }
9415
9416 /* If we have equivalent bases we might be able to simplify. */
9417 if (indirect_base0 == indirect_base1
9418 && operand_equal_p (base0, base1,
9419 indirect_base0 ? OEP_ADDRESS_OF : 0))
9420 {
9421 /* We can fold this expression to a constant if the non-constant
9422 offset parts are equal. */
9423 if ((offset0 == offset1
9424 || (offset0 && offset1
9425 && operand_equal_p (offset0, offset1, 0)))
9426 && (equality_code
9427 || (indirect_base0
9428 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9429 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9430 {
9431 if (!equality_code
9432 && maybe_ne (bitpos0, bitpos1)
9433 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9434 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9435 fold_overflow_warning (("assuming pointer wraparound does not "
9436 "occur when comparing P +- C1 with "
9437 "P +- C2"),
9438 WARN_STRICT_OVERFLOW_CONDITIONAL);
9439
9440 switch (code)
9441 {
9442 case EQ_EXPR:
9443 if (known_eq (bitpos0, bitpos1))
9444 return constant_boolean_node (true, type);
9445 if (known_ne (bitpos0, bitpos1))
9446 return constant_boolean_node (false, type);
9447 break;
9448 case NE_EXPR:
9449 if (known_ne (bitpos0, bitpos1))
9450 return constant_boolean_node (true, type);
9451 if (known_eq (bitpos0, bitpos1))
9452 return constant_boolean_node (false, type);
9453 break;
9454 case LT_EXPR:
9455 if (known_lt (bitpos0, bitpos1))
9456 return constant_boolean_node (true, type);
9457 if (known_ge (bitpos0, bitpos1))
9458 return constant_boolean_node (false, type);
9459 break;
9460 case LE_EXPR:
9461 if (known_le (bitpos0, bitpos1))
9462 return constant_boolean_node (true, type);
9463 if (known_gt (bitpos0, bitpos1))
9464 return constant_boolean_node (false, type);
9465 break;
9466 case GE_EXPR:
9467 if (known_ge (bitpos0, bitpos1))
9468 return constant_boolean_node (true, type);
9469 if (known_lt (bitpos0, bitpos1))
9470 return constant_boolean_node (false, type);
9471 break;
9472 case GT_EXPR:
9473 if (known_gt (bitpos0, bitpos1))
9474 return constant_boolean_node (true, type);
9475 if (known_le (bitpos0, bitpos1))
9476 return constant_boolean_node (false, type);
9477 break;
9478 default:;
9479 }
9480 }
9481 /* We can simplify the comparison to a comparison of the variable
9482 offset parts if the constant offset parts are equal.
9483 Be careful to use signed sizetype here because otherwise we
9484 mess with array offsets in the wrong way. This is possible
9485 because pointer arithmetic is restricted to remain within an
9486 object and overflow on pointer differences is undefined as of
9487 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9488 else if (known_eq (bitpos0, bitpos1)
9489 && (equality_code
9490 || (indirect_base0
9491 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9492 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9493 {
9494 /* By converting to signed sizetype we cover middle-end pointer
9495 arithmetic which operates on unsigned pointer types of size
9496 type size and ARRAY_REF offsets which are properly sign or
9497 zero extended from their type in case it is narrower than
9498 sizetype. */
9499 if (offset0 == NULL_TREE)
9500 offset0 = build_int_cst (ssizetype, 0);
9501 else
9502 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9503 if (offset1 == NULL_TREE)
9504 offset1 = build_int_cst (ssizetype, 0);
9505 else
9506 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9507
9508 if (!equality_code
9509 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9510 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9511 fold_overflow_warning (("assuming pointer wraparound does not "
9512 "occur when comparing P +- C1 with "
9513 "P +- C2"),
9514 WARN_STRICT_OVERFLOW_COMPARISON);
9515
9516 return fold_build2_loc (loc, code, type, offset0, offset1);
9517 }
9518 }
9519 /* For equal offsets we can simplify to a comparison of the
9520 base addresses. */
9521 else if (known_eq (bitpos0, bitpos1)
9522 && (indirect_base0
9523 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9524 && (indirect_base1
9525 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9526 && ((offset0 == offset1)
9527 || (offset0 && offset1
9528 && operand_equal_p (offset0, offset1, 0))))
9529 {
9530 if (indirect_base0)
9531 base0 = build_fold_addr_expr_loc (loc, base0);
9532 if (indirect_base1)
9533 base1 = build_fold_addr_expr_loc (loc, base1);
9534 return fold_build2_loc (loc, code, type, base0, base1);
9535 }
9536 /* Comparison between an ordinary (non-weak) symbol and a null
9537 pointer can be eliminated since such symbols must have a non
9538 null address. In C, relational expressions between pointers
9539 to objects and null pointers are undefined. The results
9540 below follow the C++ rules with the additional property that
9541 every object pointer compares greater than a null pointer.
9542 */
9543 else if (((DECL_P (base0)
9544 && maybe_nonzero_address (base0) > 0
9545 /* Avoid folding references to struct members at offset 0 to
9546 prevent tests like '&ptr->firstmember == 0' from getting
9547 eliminated. When ptr is null, although the -> expression
9548 is strictly speaking invalid, GCC retains it as a matter
9549 of QoI. See PR c/44555. */
9550 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
9551 || CONSTANT_CLASS_P (base0))
9552 && indirect_base0
9553 /* The caller guarantees that when one of the arguments is
9554 constant (i.e., null in this case) it is second. */
9555 && integer_zerop (arg1))
9556 {
9557 switch (code)
9558 {
9559 case EQ_EXPR:
9560 case LE_EXPR:
9561 case LT_EXPR:
9562 return constant_boolean_node (false, type);
9563 case GE_EXPR:
9564 case GT_EXPR:
9565 case NE_EXPR:
9566 return constant_boolean_node (true, type);
9567 default:
9568 gcc_unreachable ();
9569 }
9570 }
9571 }
9572
9573 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9574 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9575 the resulting offset is smaller in absolute value than the
9576 original one and has the same sign. */
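/* Illustrative: for signed x and y with undefined overflow, x + 10 < y + 3
   is rewritten as x + 7 < y; the new constant 7 is smaller in magnitude
   than 10 and keeps its sign.  */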
9577 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9578 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9579 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9580 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9581 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9582 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9583 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9584 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9585 {
9586 tree const1 = TREE_OPERAND (arg0, 1);
9587 tree const2 = TREE_OPERAND (arg1, 1);
9588 tree variable1 = TREE_OPERAND (arg0, 0);
9589 tree variable2 = TREE_OPERAND (arg1, 0);
9590 tree cst;
9591 const char * const warnmsg = G_("assuming signed overflow does not "
9592 "occur when combining constants around "
9593 "a comparison");
9594
9595 /* Put the constant on the side where it doesn't overflow and is
9596 of lower absolute value and of the same sign as before. */
9597 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9598 ? MINUS_EXPR : PLUS_EXPR,
9599 const2, const1);
9600 if (!TREE_OVERFLOW (cst)
9601 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9602 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9603 {
9604 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9605 return fold_build2_loc (loc, code, type,
9606 variable1,
9607 fold_build2_loc (loc, TREE_CODE (arg1),
9608 TREE_TYPE (arg1),
9609 variable2, cst));
9610 }
9611
9612 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9613 ? MINUS_EXPR : PLUS_EXPR,
9614 const1, const2);
9615 if (!TREE_OVERFLOW (cst)
9616 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9617 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9618 {
9619 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9620 return fold_build2_loc (loc, code, type,
9621 fold_build2_loc (loc, TREE_CODE (arg0),
9622 TREE_TYPE (arg0),
9623 variable1, cst),
9624 variable2);
9625 }
9626 }
9627
9628 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9629 if (tem)
9630 return tem;
9631
9632 /* If we are comparing an expression that just has comparisons
9633 of two integer values, arithmetic expressions of those comparisons,
9634 and constants, we can simplify it. There are only three cases
9635 to check: the two values can either be equal, the first can be
9636 greater, or the second can be greater. Fold the expression for
9637 those three values. Since each value must be 0 or 1, we have
9638 eight possibilities, each of which corresponds to the constant 0
9639 or 1 or one of the six possible comparisons.
9640
9641 This handles common cases like (a > b) == 0 but also handles
9642 expressions like ((x > y) - (y > x)) > 0, which supposedly
9643 occur in macroized code. */
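/* Illustrative: for (a > b) == 0 the three substitutions evaluate to
   0, 1 and 1, giving mask 3 and therefore LE_EXPR, so the expression
   folds to a <= b.  */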
9644
9645 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9646 {
9647 tree cval1 = 0, cval2 = 0;
9648
9649 if (twoval_comparison_p (arg0, &cval1, &cval2)
9650 /* Don't handle degenerate cases here; they should already
9651 have been handled anyway. */
9652 && cval1 != 0 && cval2 != 0
9653 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9654 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9655 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9656 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9657 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9658 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9659 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9660 {
9661 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9662 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9663
9664 /* We can't just pass T to eval_subst in case cval1 or cval2
9665 was the same as ARG1. */
9666
9667 tree high_result
9668 = fold_build2_loc (loc, code, type,
9669 eval_subst (loc, arg0, cval1, maxval,
9670 cval2, minval),
9671 arg1);
9672 tree equal_result
9673 = fold_build2_loc (loc, code, type,
9674 eval_subst (loc, arg0, cval1, maxval,
9675 cval2, maxval),
9676 arg1);
9677 tree low_result
9678 = fold_build2_loc (loc, code, type,
9679 eval_subst (loc, arg0, cval1, minval,
9680 cval2, maxval),
9681 arg1);
9682
9683 /* All three of these results should be 0 or 1. Confirm they are.
9684 Then use those values to select the proper code to use. */
9685
9686 if (TREE_CODE (high_result) == INTEGER_CST
9687 && TREE_CODE (equal_result) == INTEGER_CST
9688 && TREE_CODE (low_result) == INTEGER_CST)
9689 {
9690 /* Make a 3-bit mask with the high-order bit being the
9691 value for `>', the next for '=', and the low for '<'. */
9692 switch ((integer_onep (high_result) * 4)
9693 + (integer_onep (equal_result) * 2)
9694 + integer_onep (low_result))
9695 {
9696 case 0:
9697 /* Always false. */
9698 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9699 case 1:
9700 code = LT_EXPR;
9701 break;
9702 case 2:
9703 code = EQ_EXPR;
9704 break;
9705 case 3:
9706 code = LE_EXPR;
9707 break;
9708 case 4:
9709 code = GT_EXPR;
9710 break;
9711 case 5:
9712 code = NE_EXPR;
9713 break;
9714 case 6:
9715 code = GE_EXPR;
9716 break;
9717 case 7:
9718 /* Always true. */
9719 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9720 }
9721
9722 return fold_build2_loc (loc, code, type, cval1, cval2);
9723 }
9724 }
9725 }
9726
9727 return NULL_TREE;
9728 }
9729
9730
9731 /* Subroutine of fold_binary. Optimize complex multiplications of the
9732 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9733 argument EXPR represents the expression "z" of type TYPE. */
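/* Illustrative: for z = a + b*i this builds (a*a + b*b) + 0i, i.e. the
   squared magnitude of z as a complex value with zero imaginary part.  */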
9734
9735 static tree
9736 fold_mult_zconjz (location_t loc, tree type, tree expr)
9737 {
9738 tree itype = TREE_TYPE (type);
9739 tree rpart, ipart, tem;
9740
9741 if (TREE_CODE (expr) == COMPLEX_EXPR)
9742 {
9743 rpart = TREE_OPERAND (expr, 0);
9744 ipart = TREE_OPERAND (expr, 1);
9745 }
9746 else if (TREE_CODE (expr) == COMPLEX_CST)
9747 {
9748 rpart = TREE_REALPART (expr);
9749 ipart = TREE_IMAGPART (expr);
9750 }
9751 else
9752 {
9753 expr = save_expr (expr);
9754 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9755 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9756 }
9757
9758 rpart = save_expr (rpart);
9759 ipart = save_expr (ipart);
9760 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9761 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9762 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9763 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9764 build_zero_cst (itype));
9765 }
9766
9767
9768 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9769 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9770 true if successful. */
9771
9772 static bool
9773 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9774 {
9775 unsigned HOST_WIDE_INT i, nunits;
9776
9777 if (TREE_CODE (arg) == VECTOR_CST
9778 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9779 {
9780 for (i = 0; i < nunits; ++i)
9781 elts[i] = VECTOR_CST_ELT (arg, i);
9782 }
9783 else if (TREE_CODE (arg) == CONSTRUCTOR)
9784 {
9785 constructor_elt *elt;
9786
9787 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9788 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9789 return false;
9790 else
9791 elts[i] = elt->value;
9792 }
9793 else
9794 return false;
9795 for (; i < nelts; i++)
9796 elts[i]
9797 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9798 return true;
9799 }
9800
9801 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9802 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9803 NULL_TREE otherwise. */
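/* Illustrative: with arg0 = { 1, 2 }, arg1 = { 3, 4 } and sel = { 0, 3 },
   the selector indexes the concatenation { 1, 2, 3, 4 }, so the folded
   result is { 1, 4 }.  */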
9804
9805 tree
9806 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9807 {
9808 unsigned int i;
9809 unsigned HOST_WIDE_INT nelts;
9810 bool need_ctor = false;
9811
9812 if (!sel.length ().is_constant (&nelts))
9813 return NULL_TREE;
9814 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9815 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9816 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9817 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9818 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9819 return NULL_TREE;
9820
9821 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9822 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9823 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9824 return NULL_TREE;
9825
9826 tree_vector_builder out_elts (type, nelts, 1);
9827 for (i = 0; i < nelts; i++)
9828 {
9829 HOST_WIDE_INT index;
9830 if (!sel[i].is_constant (&index))
9831 return NULL_TREE;
9832 if (!CONSTANT_CLASS_P (in_elts[index]))
9833 need_ctor = true;
9834 out_elts.quick_push (unshare_expr (in_elts[index]));
9835 }
9836
9837 if (need_ctor)
9838 {
9839 vec<constructor_elt, va_gc> *v;
9840 vec_alloc (v, nelts);
9841 for (i = 0; i < nelts; i++)
9842 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9843 return build_constructor (type, v);
9844 }
9845 else
9846 return out_elts.build ();
9847 }
9848
9849 /* Try to fold a pointer difference of type TYPE between two address expressions of
9850 array references AREF0 and AREF1 using location LOC. Return a
9851 simplified expression for the difference or NULL_TREE. */
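/* Illustrative sketch: for int a[10], the difference &a[i] - &a[j]
   reduces to (i - j) * sizeof (int), with a zero base offset because the
   bases are identical.  */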
9852
9853 static tree
9854 fold_addr_of_array_ref_difference (location_t loc, tree type,
9855 tree aref0, tree aref1,
9856 bool use_pointer_diff)
9857 {
9858 tree base0 = TREE_OPERAND (aref0, 0);
9859 tree base1 = TREE_OPERAND (aref1, 0);
9860 tree base_offset = build_int_cst (type, 0);
9861
9862 /* If the bases are array references as well, recurse. If the bases
9863 are pointer indirections compute the difference of the pointers.
9864 If the bases are equal, we are set. */
9865 if ((TREE_CODE (base0) == ARRAY_REF
9866 && TREE_CODE (base1) == ARRAY_REF
9867 && (base_offset
9868 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9869 use_pointer_diff)))
9870 || (INDIRECT_REF_P (base0)
9871 && INDIRECT_REF_P (base1)
9872 && (base_offset
9873 = use_pointer_diff
9874 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9875 TREE_OPERAND (base0, 0),
9876 TREE_OPERAND (base1, 0))
9877 : fold_binary_loc (loc, MINUS_EXPR, type,
9878 fold_convert (type,
9879 TREE_OPERAND (base0, 0)),
9880 fold_convert (type,
9881 TREE_OPERAND (base1, 0)))))
9882 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9883 {
9884 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9885 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9886 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9887 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9888 return fold_build2_loc (loc, PLUS_EXPR, type,
9889 base_offset,
9890 fold_build2_loc (loc, MULT_EXPR, type,
9891 diff, esz));
9892 }
9893 return NULL_TREE;
9894 }
9895
9896 /* If the real or vector real constant CST of type TYPE has an exact
9897 inverse, return it, else return NULL. */
9898
9899 tree
9900 exact_inverse (tree type, tree cst)
9901 {
9902 REAL_VALUE_TYPE r;
9903 tree unit_type;
9904 machine_mode mode;
9905
9906 switch (TREE_CODE (cst))
9907 {
9908 case REAL_CST:
9909 r = TREE_REAL_CST (cst);
9910
9911 if (exact_real_inverse (TYPE_MODE (type), &r))
9912 return build_real (type, r);
9913
9914 return NULL_TREE;
9915
9916 case VECTOR_CST:
9917 {
9918 unit_type = TREE_TYPE (type);
9919 mode = TYPE_MODE (unit_type);
9920
9921 tree_vector_builder elts;
9922 if (!elts.new_unary_operation (type, cst, false))
9923 return NULL_TREE;
9924 unsigned int count = elts.encoded_nelts ();
9925 for (unsigned int i = 0; i < count; ++i)
9926 {
9927 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9928 if (!exact_real_inverse (mode, &r))
9929 return NULL_TREE;
9930 elts.quick_push (build_real (unit_type, r));
9931 }
9932
9933 return elts.build ();
9934 }
9935
9936 default:
9937 return NULL_TREE;
9938 }
9939 }
9940
9941 /* Mask out the tz least significant bits of X of type TYPE where
9942 tz is the number of trailing zeroes in Y. */
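/* Illustrative: if Y is 8 (three trailing zero bits), X = 0b10111 becomes
   0b10000; if Y is odd, X is returned unchanged.  */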
9943 static wide_int
9944 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9945 {
9946 int tz = wi::ctz (y);
9947 if (tz > 0)
9948 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9949 return x;
9950 }
9951
9952 /* Return true when T is an address and is known to be nonzero.
9953 For floating point we further ensure that T is not denormal.
9954 Similar logic is present in nonzero_address in rtlanal.h.
9955
9956 If the return value is based on the assumption that signed overflow
9957 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9958 change *STRICT_OVERFLOW_P. */
9959
9960 static bool
9961 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9962 {
9963 tree type = TREE_TYPE (t);
9964 enum tree_code code;
9965
9966 /* Doing something useful for floating point would need more work. */
9967 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9968 return false;
9969
9970 code = TREE_CODE (t);
9971 switch (TREE_CODE_CLASS (code))
9972 {
9973 case tcc_unary:
9974 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9975 strict_overflow_p);
9976 case tcc_binary:
9977 case tcc_comparison:
9978 return tree_binary_nonzero_warnv_p (code, type,
9979 TREE_OPERAND (t, 0),
9980 TREE_OPERAND (t, 1),
9981 strict_overflow_p);
9982 case tcc_constant:
9983 case tcc_declaration:
9984 case tcc_reference:
9985 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9986
9987 default:
9988 break;
9989 }
9990
9991 switch (code)
9992 {
9993 case TRUTH_NOT_EXPR:
9994 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9995 strict_overflow_p);
9996
9997 case TRUTH_AND_EXPR:
9998 case TRUTH_OR_EXPR:
9999 case TRUTH_XOR_EXPR:
10000 return tree_binary_nonzero_warnv_p (code, type,
10001 TREE_OPERAND (t, 0),
10002 TREE_OPERAND (t, 1),
10003 strict_overflow_p);
10004
10005 case COND_EXPR:
10006 case CONSTRUCTOR:
10007 case OBJ_TYPE_REF:
10008 case ASSERT_EXPR:
10009 case ADDR_EXPR:
10010 case WITH_SIZE_EXPR:
10011 case SSA_NAME:
10012 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10013
10014 case COMPOUND_EXPR:
10015 case MODIFY_EXPR:
10016 case BIND_EXPR:
10017 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10018 strict_overflow_p);
10019
10020 case SAVE_EXPR:
10021 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10022 strict_overflow_p);
10023
10024 case CALL_EXPR:
10025 {
10026 tree fndecl = get_callee_fndecl (t);
10027 if (!fndecl) return false;
10028 if (flag_delete_null_pointer_checks && !flag_check_new
10029 && DECL_IS_OPERATOR_NEW_P (fndecl)
10030 && !TREE_NOTHROW (fndecl))
10031 return true;
10032 if (flag_delete_null_pointer_checks
10033 && lookup_attribute ("returns_nonnull",
10034 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10035 return true;
10036 return alloca_call_p (t);
10037 }
10038
10039 default:
10040 break;
10041 }
10042 return false;
10043 }
10044
10045 /* Return true when T is an address and is known to be nonzero.
10046 Handle warnings about undefined signed overflow. */
10047
10048 bool
10049 tree_expr_nonzero_p (tree t)
10050 {
10051 bool ret, strict_overflow_p;
10052
10053 strict_overflow_p = false;
10054 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10055 if (strict_overflow_p)
10056 fold_overflow_warning (("assuming signed overflow does not occur when "
10057 "determining that expression is always "
10058 "non-zero"),
10059 WARN_STRICT_OVERFLOW_MISC);
10060 return ret;
10061 }
10062
10063 /* Return true if T is known not to be equal to an integer W. */
10064
10065 bool
10066 expr_not_equal_to (tree t, const wide_int &w)
10067 {
10068 wide_int min, max, nz;
10069 value_range_kind rtype;
10070 switch (TREE_CODE (t))
10071 {
10072 case INTEGER_CST:
10073 return wi::to_wide (t) != w;
10074
10075 case SSA_NAME:
10076 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10077 return false;
10078 rtype = get_range_info (t, &min, &max);
10079 if (rtype == VR_RANGE)
10080 {
10081 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
10082 return true;
10083 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
10084 return true;
10085 }
10086 else if (rtype == VR_ANTI_RANGE
10087 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
10088 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
10089 return true;
10090 /* If T has some known zero bits and W has any of those bits set,
10091 then T is known not to be equal to W. */
10092 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10093 TYPE_PRECISION (TREE_TYPE (t))), 0))
10094 return true;
10095 return false;
10096
10097 default:
10098 return false;
10099 }
10100 }
10101
10102 /* Fold a binary expression of code CODE and type TYPE with operands
10103 OP0 and OP1. LOC is the location of the resulting expression.
10104 Return the folded expression if folding is successful. Otherwise,
10105 return NULL_TREE. */
10106
10107 tree
10108 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10109 tree op0, tree op1)
10110 {
10111 enum tree_code_class kind = TREE_CODE_CLASS (code);
10112 tree arg0, arg1, tem;
10113 tree t1 = NULL_TREE;
10114 bool strict_overflow_p;
10115 unsigned int prec;
10116
10117 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10118 && TREE_CODE_LENGTH (code) == 2
10119 && op0 != NULL_TREE
10120 && op1 != NULL_TREE);
10121
10122 arg0 = op0;
10123 arg1 = op1;
10124
10125 /* Strip any conversions that don't change the mode. This is
10126 safe for every expression, except for a comparison expression
10127 because its signedness is derived from its operands. So, in
10128 the latter case, only strip conversions that don't change the
10129 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10130 preserved.
10131
10132 Note that this is done as an internal manipulation within the
10133 constant folder, in order to find the simplest representation
10134 of the arguments so that their form can be studied. In any
10135 case, the appropriate type conversions should be put back in
10136 the tree that will get out of the constant folder. */
10137
10138 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10139 {
10140 STRIP_SIGN_NOPS (arg0);
10141 STRIP_SIGN_NOPS (arg1);
10142 }
10143 else
10144 {
10145 STRIP_NOPS (arg0);
10146 STRIP_NOPS (arg1);
10147 }
10148
10149 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10150 constant but we can't do arithmetic on them. */
10151 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10152 {
10153 tem = const_binop (code, type, arg0, arg1);
10154 if (tem != NULL_TREE)
10155 {
10156 if (TREE_TYPE (tem) != type)
10157 tem = fold_convert_loc (loc, type, tem);
10158 return tem;
10159 }
10160 }
10161
10162 /* If this is a commutative operation, and ARG0 is a constant, move it
10163 to ARG1 to reduce the number of tests below. */
10164 if (commutative_tree_code (code)
10165 && tree_swap_operands_p (arg0, arg1))
10166 return fold_build2_loc (loc, code, type, op1, op0);
10167
10168 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10169 to ARG1 to reduce the number of tests below. */
10170 if (kind == tcc_comparison
10171 && tree_swap_operands_p (arg0, arg1))
10172 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10173
10174 tem = generic_simplify (loc, code, type, op0, op1);
10175 if (tem)
10176 return tem;
10177
10178 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10179
10180 First check for cases where an arithmetic operation is applied to a
10181 compound, conditional, or comparison operation. Push the arithmetic
10182 operation inside the compound or conditional to see if any folding
10183 can then be done. Convert comparison to conditional for this purpose.
10184 This also optimizes non-constant cases that used to be done in
10185 expand_expr.
10186
10187 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10188 one of the operands is a comparison and the other is a comparison, a
10189 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10190 code below would make the expression more complex. Change it to a
10191 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10192 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10193
10194 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10195 || code == EQ_EXPR || code == NE_EXPR)
10196 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10197 && ((truth_value_p (TREE_CODE (arg0))
10198 && (truth_value_p (TREE_CODE (arg1))
10199 || (TREE_CODE (arg1) == BIT_AND_EXPR
10200 && integer_onep (TREE_OPERAND (arg1, 1)))))
10201 || (truth_value_p (TREE_CODE (arg1))
10202 && (truth_value_p (TREE_CODE (arg0))
10203 || (TREE_CODE (arg0) == BIT_AND_EXPR
10204 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10205 {
10206 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10207 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10208 : TRUTH_XOR_EXPR,
10209 boolean_type_node,
10210 fold_convert_loc (loc, boolean_type_node, arg0),
10211 fold_convert_loc (loc, boolean_type_node, arg1));
10212
10213 if (code == EQ_EXPR)
10214 tem = invert_truthvalue_loc (loc, tem);
10215
10216 return fold_convert_loc (loc, type, tem);
10217 }
10218
10219 if (TREE_CODE_CLASS (code) == tcc_binary
10220 || TREE_CODE_CLASS (code) == tcc_comparison)
10221 {
10222 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10223 {
10224 tem = fold_build2_loc (loc, code, type,
10225 fold_convert_loc (loc, TREE_TYPE (op0),
10226 TREE_OPERAND (arg0, 1)), op1);
10227 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10228 tem);
10229 }
10230 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10231 {
10232 tem = fold_build2_loc (loc, code, type, op0,
10233 fold_convert_loc (loc, TREE_TYPE (op1),
10234 TREE_OPERAND (arg1, 1)));
10235 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10236 tem);
10237 }
10238
10239 if (TREE_CODE (arg0) == COND_EXPR
10240 || TREE_CODE (arg0) == VEC_COND_EXPR
10241 || COMPARISON_CLASS_P (arg0))
10242 {
10243 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10244 arg0, arg1,
10245 /*cond_first_p=*/1);
10246 if (tem != NULL_TREE)
10247 return tem;
10248 }
10249
10250 if (TREE_CODE (arg1) == COND_EXPR
10251 || TREE_CODE (arg1) == VEC_COND_EXPR
10252 || COMPARISON_CLASS_P (arg1))
10253 {
10254 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10255 arg1, arg0,
10256 /*cond_first_p=*/0);
10257 if (tem != NULL_TREE)
10258 return tem;
10259 }
10260 }
10261
10262 switch (code)
10263 {
10264 case MEM_REF:
10265 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10266 if (TREE_CODE (arg0) == ADDR_EXPR
10267 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10268 {
10269 tree iref = TREE_OPERAND (arg0, 0);
10270 return fold_build2 (MEM_REF, type,
10271 TREE_OPERAND (iref, 0),
10272 int_const_binop (PLUS_EXPR, arg1,
10273 TREE_OPERAND (iref, 1)));
10274 }
10275
10276 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10277 if (TREE_CODE (arg0) == ADDR_EXPR
10278 && handled_component_p (TREE_OPERAND (arg0, 0)))
10279 {
10280 tree base;
10281 poly_int64 coffset;
10282 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10283 &coffset);
10284 if (!base)
10285 return NULL_TREE;
10286 return fold_build2 (MEM_REF, type,
10287 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10288 int_const_binop (PLUS_EXPR, arg1,
10289 size_int (coffset)));
10290 }
10291
10292 return NULL_TREE;
10293
10294 case POINTER_PLUS_EXPR:
10295 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10296 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10297 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10298 return fold_convert_loc (loc, type,
10299 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10300 fold_convert_loc (loc, sizetype,
10301 arg1),
10302 fold_convert_loc (loc, sizetype,
10303 arg0)));
10304
10305 return NULL_TREE;
10306
10307 case PLUS_EXPR:
10308 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10309 {
10310 /* X + (X / CST) * -CST is X % CST. */
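/* Illustrative: x + (x / 16) * -16 folds to x % 16.  */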
10311 if (TREE_CODE (arg1) == MULT_EXPR
10312 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10313 && operand_equal_p (arg0,
10314 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10315 {
10316 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10317 tree cst1 = TREE_OPERAND (arg1, 1);
10318 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10319 cst1, cst0);
10320 if (sum && integer_zerop (sum))
10321 return fold_convert_loc (loc, type,
10322 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10323 TREE_TYPE (arg0), arg0,
10324 cst0));
10325 }
10326 }
10327
10328 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10329 one. Make sure the type is not saturating and has the signedness of
10330 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10331 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10332 if ((TREE_CODE (arg0) == MULT_EXPR
10333 || TREE_CODE (arg1) == MULT_EXPR)
10334 && !TYPE_SATURATING (type)
10335 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10336 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10337 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10338 {
10339 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10340 if (tem)
10341 return tem;
10342 }
10343
10344 if (! FLOAT_TYPE_P (type))
10345 {
10346 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10347 (plus (plus (mult) (mult)) (foo)) so that we can
10348 take advantage of the factoring cases below. */
10349 if (ANY_INTEGRAL_TYPE_P (type)
10350 && TYPE_OVERFLOW_WRAPS (type)
10351 && (((TREE_CODE (arg0) == PLUS_EXPR
10352 || TREE_CODE (arg0) == MINUS_EXPR)
10353 && TREE_CODE (arg1) == MULT_EXPR)
10354 || ((TREE_CODE (arg1) == PLUS_EXPR
10355 || TREE_CODE (arg1) == MINUS_EXPR)
10356 && TREE_CODE (arg0) == MULT_EXPR)))
10357 {
10358 tree parg0, parg1, parg, marg;
10359 enum tree_code pcode;
10360
10361 if (TREE_CODE (arg1) == MULT_EXPR)
10362 parg = arg0, marg = arg1;
10363 else
10364 parg = arg1, marg = arg0;
10365 pcode = TREE_CODE (parg);
10366 parg0 = TREE_OPERAND (parg, 0);
10367 parg1 = TREE_OPERAND (parg, 1);
10368 STRIP_NOPS (parg0);
10369 STRIP_NOPS (parg1);
10370
10371 if (TREE_CODE (parg0) == MULT_EXPR
10372 && TREE_CODE (parg1) != MULT_EXPR)
10373 return fold_build2_loc (loc, pcode, type,
10374 fold_build2_loc (loc, PLUS_EXPR, type,
10375 fold_convert_loc (loc, type,
10376 parg0),
10377 fold_convert_loc (loc, type,
10378 marg)),
10379 fold_convert_loc (loc, type, parg1));
10380 if (TREE_CODE (parg0) != MULT_EXPR
10381 && TREE_CODE (parg1) == MULT_EXPR)
10382 return
10383 fold_build2_loc (loc, PLUS_EXPR, type,
10384 fold_convert_loc (loc, type, parg0),
10385 fold_build2_loc (loc, pcode, type,
10386 fold_convert_loc (loc, type, marg),
10387 fold_convert_loc (loc, type,
10388 parg1)));
10389 }
10390 }
10391 else
10392 {
10393 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10394 to __complex__ ( x, y ). This is not the same for SNaNs or
10395 if signed zeros are involved. */
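	    /* Editor's illustration (hedged): a sketch of source code that can
	       reach this fold, assuming signaling NaNs and signed zeros need
	       not be honored (e.g. -fno-signaling-nans -fno-signed-zeros);
	       the function name is hypothetical:

	         _Complex double make (double x, double y)
	         {
	           return __builtin_complex (x, 0.0)
	                  + __builtin_complex (0.0, y);
	           // folded to __builtin_complex (x, y)
	         }
	    */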
10396 if (!HONOR_SNANS (element_mode (arg0))
10397 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10398 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10399 {
10400 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10401 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10402 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10403 bool arg0rz = false, arg0iz = false;
10404 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10405 || (arg0i && (arg0iz = real_zerop (arg0i))))
10406 {
10407 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10408 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10409 if (arg0rz && arg1i && real_zerop (arg1i))
10410 {
10411 tree rp = arg1r ? arg1r
10412 : build1 (REALPART_EXPR, rtype, arg1);
10413 tree ip = arg0i ? arg0i
10414 : build1 (IMAGPART_EXPR, rtype, arg0);
10415 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10416 }
10417 else if (arg0iz && arg1r && real_zerop (arg1r))
10418 {
10419 tree rp = arg0r ? arg0r
10420 : build1 (REALPART_EXPR, rtype, arg0);
10421 tree ip = arg1i ? arg1i
10422 : build1 (IMAGPART_EXPR, rtype, arg1);
10423 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10424 }
10425 }
10426 }
10427
10428 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10429 We associate floats only if the user has specified
10430 -fassociative-math. */
10431 if (flag_associative_math
10432 && TREE_CODE (arg1) == PLUS_EXPR
10433 && TREE_CODE (arg0) != MULT_EXPR)
10434 {
10435 tree tree10 = TREE_OPERAND (arg1, 0);
10436 tree tree11 = TREE_OPERAND (arg1, 1);
10437 if (TREE_CODE (tree11) == MULT_EXPR
10438 && TREE_CODE (tree10) == MULT_EXPR)
10439 {
10440 tree tree0;
10441 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10442 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10443 }
10444 }
10445       /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10446 We associate floats only if the user has specified
10447 -fassociative-math. */
10448 if (flag_associative_math
10449 && TREE_CODE (arg0) == PLUS_EXPR
10450 && TREE_CODE (arg1) != MULT_EXPR)
10451 {
10452 tree tree00 = TREE_OPERAND (arg0, 0);
10453 tree tree01 = TREE_OPERAND (arg0, 1);
10454 if (TREE_CODE (tree01) == MULT_EXPR
10455 && TREE_CODE (tree00) == MULT_EXPR)
10456 {
10457 tree tree0;
10458 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10459 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10460 }
10461 }
10462 }
10463
10464 bit_rotate:
10465 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10466 is a rotate of A by C1 bits. */
10467 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10468 is a rotate of A by B bits.
10469 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10470 though in this case CODE must be | and not + or ^, otherwise
10471 it doesn't return A when B is 0. */
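	/* Editor's illustration (hedged): for a 32-bit unsigned int these are
	   the usual open-coded rotate idioms (hypothetical function names):

	     unsigned int rotl3 (unsigned int x)
	     {
	       return (x << 3) + (x >> 29);        // C1 + C2 == 32
	     }

	     unsigned int rotl (unsigned int x, unsigned int b)
	     {
	       return (x << b) | (x >> (-b & 31)); // the | form, C3 == 31
	     }

	   both of which are recognized as a single left-rotate of x.  */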
10472 {
10473 enum tree_code code0, code1;
10474 tree rtype;
10475 code0 = TREE_CODE (arg0);
10476 code1 = TREE_CODE (arg1);
10477 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10478 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10479 && operand_equal_p (TREE_OPERAND (arg0, 0),
10480 TREE_OPERAND (arg1, 0), 0)
10481 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10482 TYPE_UNSIGNED (rtype))
10483 /* Only create rotates in complete modes. Other cases are not
10484 expanded properly. */
10485 && (element_precision (rtype)
10486 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
10487 {
10488 tree tree01, tree11;
10489 tree orig_tree01, orig_tree11;
10490 enum tree_code code01, code11;
10491
10492 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
10493 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
10494 STRIP_NOPS (tree01);
10495 STRIP_NOPS (tree11);
10496 code01 = TREE_CODE (tree01);
10497 code11 = TREE_CODE (tree11);
10498 if (code11 != MINUS_EXPR
10499 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
10500 {
10501 std::swap (code0, code1);
10502 std::swap (code01, code11);
10503 std::swap (tree01, tree11);
10504 std::swap (orig_tree01, orig_tree11);
10505 }
10506 if (code01 == INTEGER_CST
10507 && code11 == INTEGER_CST
10508 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10509 == element_precision (rtype)))
10510 {
10511 tem = build2_loc (loc, LROTATE_EXPR,
10512 rtype, TREE_OPERAND (arg0, 0),
10513 code0 == LSHIFT_EXPR
10514 ? orig_tree01 : orig_tree11);
10515 return fold_convert_loc (loc, type, tem);
10516 }
10517 else if (code11 == MINUS_EXPR)
10518 {
10519 tree tree110, tree111;
10520 tree110 = TREE_OPERAND (tree11, 0);
10521 tree111 = TREE_OPERAND (tree11, 1);
10522 STRIP_NOPS (tree110);
10523 STRIP_NOPS (tree111);
10524 if (TREE_CODE (tree110) == INTEGER_CST
10525 && compare_tree_int (tree110,
10526 element_precision (rtype)) == 0
10527 && operand_equal_p (tree01, tree111, 0))
10528 {
10529 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10530 ? LROTATE_EXPR : RROTATE_EXPR),
10531 rtype, TREE_OPERAND (arg0, 0),
10532 orig_tree01);
10533 return fold_convert_loc (loc, type, tem);
10534 }
10535 }
10536 else if (code == BIT_IOR_EXPR
10537 && code11 == BIT_AND_EXPR
10538 && pow2p_hwi (element_precision (rtype)))
10539 {
10540 tree tree110, tree111;
10541 tree110 = TREE_OPERAND (tree11, 0);
10542 tree111 = TREE_OPERAND (tree11, 1);
10543 STRIP_NOPS (tree110);
10544 STRIP_NOPS (tree111);
10545 if (TREE_CODE (tree110) == NEGATE_EXPR
10546 && TREE_CODE (tree111) == INTEGER_CST
10547 && compare_tree_int (tree111,
10548 element_precision (rtype) - 1) == 0
10549 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
10550 {
10551 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10552 ? LROTATE_EXPR : RROTATE_EXPR),
10553 rtype, TREE_OPERAND (arg0, 0),
10554 orig_tree01);
10555 return fold_convert_loc (loc, type, tem);
10556 }
10557 }
10558 }
10559 }
10560
10561 associate:
10562     /* In most languages, we can't associate operations on floats through
10563 parentheses. Rather than remember where the parentheses were, we
10564 don't associate floats at all, unless the user has specified
10565 -fassociative-math.
10566        And we need to make sure the type is not saturating.  */
10567
10568 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10569 && !TYPE_SATURATING (type))
10570 {
10571 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
10572 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
10573 tree atype = type;
10574 bool ok = true;
10575
10576 /* Split both trees into variables, constants, and literals. Then
10577 associate each group together, the constants with literals,
10578 then the result with variables. This increases the chances of
10579 literals being recombined later and of generating relocatable
10580 expressions for the sum of a constant and literal. */
10581 var0 = split_tree (arg0, type, code,
10582 &minus_var0, &con0, &minus_con0,
10583 &lit0, &minus_lit0, 0);
10584 var1 = split_tree (arg1, type, code,
10585 &minus_var1, &con1, &minus_con1,
10586 &lit1, &minus_lit1, code == MINUS_EXPR);
10587
10588 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10589 if (code == MINUS_EXPR)
10590 code = PLUS_EXPR;
10591
10592 /* With undefined overflow prefer doing association in a type
10593 which wraps on overflow, if that is one of the operand types. */
10594 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
10595 && !TYPE_OVERFLOW_WRAPS (type))
10596 {
10597 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10598 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10599 atype = TREE_TYPE (arg0);
10600 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10601 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10602 atype = TREE_TYPE (arg1);
10603 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10604 }
10605
10606 /* With undefined overflow we can only associate constants with one
10607 variable, and constants whose association doesn't overflow. */
10608 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
10609 && !TYPE_OVERFLOW_WRAPS (atype))
10610 {
10611 if ((var0 && var1) || (minus_var0 && minus_var1))
10612 {
10613 /* ??? If split_tree would handle NEGATE_EXPR we could
10614 simply reject these cases and the allowed cases would
10615 be the var0/minus_var1 ones. */
10616 tree tmp0 = var0 ? var0 : minus_var0;
10617 tree tmp1 = var1 ? var1 : minus_var1;
10618 bool one_neg = false;
10619
10620 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10621 {
10622 tmp0 = TREE_OPERAND (tmp0, 0);
10623 one_neg = !one_neg;
10624 }
10625 if (CONVERT_EXPR_P (tmp0)
10626 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10627 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10628 <= TYPE_PRECISION (atype)))
10629 tmp0 = TREE_OPERAND (tmp0, 0);
10630 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10631 {
10632 tmp1 = TREE_OPERAND (tmp1, 0);
10633 one_neg = !one_neg;
10634 }
10635 if (CONVERT_EXPR_P (tmp1)
10636 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10637 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10638 <= TYPE_PRECISION (atype)))
10639 tmp1 = TREE_OPERAND (tmp1, 0);
10640 /* The only case we can still associate with two variables
10641 is if they cancel out. */
10642 if (!one_neg
10643 || !operand_equal_p (tmp0, tmp1, 0))
10644 ok = false;
10645 }
10646 else if ((var0 && minus_var1
10647 && ! operand_equal_p (var0, minus_var1, 0))
10648 || (minus_var0 && var1
10649 && ! operand_equal_p (minus_var0, var1, 0)))
10650 ok = false;
10651 }
10652
10653 /* Only do something if we found more than two objects. Otherwise,
10654 nothing has changed and we risk infinite recursion. */
10655 if (ok
10656 && ((var0 != 0) + (var1 != 0)
10657 + (minus_var0 != 0) + (minus_var1 != 0)
10658 + (con0 != 0) + (con1 != 0)
10659 + (minus_con0 != 0) + (minus_con1 != 0)
10660 + (lit0 != 0) + (lit1 != 0)
10661 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
10662 {
10663 var0 = associate_trees (loc, var0, var1, code, atype);
10664 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
10665 code, atype);
10666 con0 = associate_trees (loc, con0, con1, code, atype);
10667 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
10668 code, atype);
10669 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10670 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10671 code, atype);
10672
10673 if (minus_var0 && var0)
10674 {
10675 var0 = associate_trees (loc, var0, minus_var0,
10676 MINUS_EXPR, atype);
10677 minus_var0 = 0;
10678 }
10679 if (minus_con0 && con0)
10680 {
10681 con0 = associate_trees (loc, con0, minus_con0,
10682 MINUS_EXPR, atype);
10683 minus_con0 = 0;
10684 }
10685
10686 /* Preserve the MINUS_EXPR if the negative part of the literal is
10687 greater than the positive part. Otherwise, the multiplicative
10688              folding code (i.e. extract_muldiv) may be fooled when
10689              unsigned constants are subtracted, as in the following
10690 example: ((X*2 + 4) - 8U)/2. */
10691 if (minus_lit0 && lit0)
10692 {
10693 if (TREE_CODE (lit0) == INTEGER_CST
10694 && TREE_CODE (minus_lit0) == INTEGER_CST
10695 && tree_int_cst_lt (lit0, minus_lit0)
10696 /* But avoid ending up with only negated parts. */
10697 && (var0 || con0))
10698 {
10699 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10700 MINUS_EXPR, atype);
10701 lit0 = 0;
10702 }
10703 else
10704 {
10705 lit0 = associate_trees (loc, lit0, minus_lit0,
10706 MINUS_EXPR, atype);
10707 minus_lit0 = 0;
10708 }
10709 }
10710
10711 /* Don't introduce overflows through reassociation. */
10712 if ((lit0 && TREE_OVERFLOW_P (lit0))
10713 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
10714 return NULL_TREE;
10715
10716           /* Fold lit0 and minus_lit0 into con0 and minus_con0.  */
10717 con0 = associate_trees (loc, con0, lit0, code, atype);
10718 lit0 = 0;
10719 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
10720 code, atype);
10721 minus_lit0 = 0;
10722
10723 /* Eliminate minus_con0. */
10724 if (minus_con0)
10725 {
10726 if (con0)
10727 con0 = associate_trees (loc, con0, minus_con0,
10728 MINUS_EXPR, atype);
10729 else if (var0)
10730 var0 = associate_trees (loc, var0, minus_con0,
10731 MINUS_EXPR, atype);
10732 else
10733 gcc_unreachable ();
10734 minus_con0 = 0;
10735 }
10736
10737 /* Eliminate minus_var0. */
10738 if (minus_var0)
10739 {
10740 if (con0)
10741 con0 = associate_trees (loc, con0, minus_var0,
10742 MINUS_EXPR, atype);
10743 else
10744 gcc_unreachable ();
10745 minus_var0 = 0;
10746 }
10747
10748 return
10749 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10750 code, atype));
10751 }
10752 }
10753
10754 return NULL_TREE;
10755
10756 case POINTER_DIFF_EXPR:
10757 case MINUS_EXPR:
10758 /* Fold &a[i] - &a[j] to i-j. */
10759 if (TREE_CODE (arg0) == ADDR_EXPR
10760 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10761 && TREE_CODE (arg1) == ADDR_EXPR
10762 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10763 {
10764 tree tem = fold_addr_of_array_ref_difference (loc, type,
10765 TREE_OPERAND (arg0, 0),
10766 TREE_OPERAND (arg1, 0),
10767 code
10768 == POINTER_DIFF_EXPR);
10769 if (tem)
10770 return tem;
10771 }
10772
10773       /* The remaining transformations do not apply to pointers.  */
10774 if (code == POINTER_DIFF_EXPR)
10775 return NULL_TREE;
10776
10777 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10778 if (TREE_CODE (arg0) == NEGATE_EXPR
10779 && negate_expr_p (op1)
10780 /* If arg0 is e.g. unsigned int and type is int, then this could
10781 introduce UB, because if A is INT_MIN at runtime, the original
10782 expression can be well defined while the latter is not.
10783 See PR83269. */
10784 && !(ANY_INTEGRAL_TYPE_P (type)
10785 && TYPE_OVERFLOW_UNDEFINED (type)
10786 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10787 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10788 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
10789 fold_convert_loc (loc, type,
10790 TREE_OPERAND (arg0, 0)));
10791
10792 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10793 __complex__ ( x, -y ). This is not the same for SNaNs or if
10794 signed zeros are involved. */
10795 if (!HONOR_SNANS (element_mode (arg0))
10796 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10797 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10798 {
10799 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10800 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10801 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10802 bool arg0rz = false, arg0iz = false;
10803 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10804 || (arg0i && (arg0iz = real_zerop (arg0i))))
10805 {
10806 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10807 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10808 if (arg0rz && arg1i && real_zerop (arg1i))
10809 {
10810 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10811 arg1r ? arg1r
10812 : build1 (REALPART_EXPR, rtype, arg1));
10813 tree ip = arg0i ? arg0i
10814 : build1 (IMAGPART_EXPR, rtype, arg0);
10815 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10816 }
10817 else if (arg0iz && arg1r && real_zerop (arg1r))
10818 {
10819 tree rp = arg0r ? arg0r
10820 : build1 (REALPART_EXPR, rtype, arg0);
10821 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10822 arg1i ? arg1i
10823 : build1 (IMAGPART_EXPR, rtype, arg1));
10824 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10825 }
10826 }
10827 }
10828
10829 /* A - B -> A + (-B) if B is easily negatable. */
10830 if (negate_expr_p (op1)
10831 && ! TYPE_OVERFLOW_SANITIZED (type)
10832 && ((FLOAT_TYPE_P (type)
10833 /* Avoid this transformation if B is a positive REAL_CST. */
10834 && (TREE_CODE (op1) != REAL_CST
10835 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10836 || INTEGRAL_TYPE_P (type)))
10837 return fold_build2_loc (loc, PLUS_EXPR, type,
10838 fold_convert_loc (loc, type, arg0),
10839 negate_expr (op1));
10840
10841       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same, or
10842          one of them being 1.  Make sure the type is not saturating and has the signedness of
10843 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10844 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10845 if ((TREE_CODE (arg0) == MULT_EXPR
10846 || TREE_CODE (arg1) == MULT_EXPR)
10847 && !TYPE_SATURATING (type)
10848 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10849 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10850 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10851 {
10852 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10853 if (tem)
10854 return tem;
10855 }
10856
10857 goto associate;
10858
10859 case MULT_EXPR:
10860 if (! FLOAT_TYPE_P (type))
10861 {
10862 /* Transform x * -C into -x * C if x is easily negatable. */
10863 if (TREE_CODE (op1) == INTEGER_CST
10864 && tree_int_cst_sgn (op1) == -1
10865 && negate_expr_p (op0)
10866 && negate_expr_p (op1)
10867 && (tem = negate_expr (op1)) != op1
10868 && ! TREE_OVERFLOW (tem))
10869 return fold_build2_loc (loc, MULT_EXPR, type,
10870 fold_convert_loc (loc, type,
10871 negate_expr (op0)), tem);
10872
10873 strict_overflow_p = false;
10874 if (TREE_CODE (arg1) == INTEGER_CST
10875 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10876 &strict_overflow_p)) != 0)
10877 {
10878 if (strict_overflow_p)
10879 fold_overflow_warning (("assuming signed overflow does not "
10880 "occur when simplifying "
10881 "multiplication"),
10882 WARN_STRICT_OVERFLOW_MISC);
10883 return fold_convert_loc (loc, type, tem);
10884 }
10885
10886 /* Optimize z * conj(z) for integer complex numbers. */
10887 if (TREE_CODE (arg0) == CONJ_EXPR
10888 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10889 return fold_mult_zconjz (loc, type, arg1);
10890 if (TREE_CODE (arg1) == CONJ_EXPR
10891 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10892 return fold_mult_zconjz (loc, type, arg0);
10893 }
10894 else
10895 {
10896 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10897 This is not the same for NaNs or if signed zeros are
10898 involved. */
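	    /* Editor's illustration (hedged): with z = a + b*I, z * I is
	       -b + a*I, so for example (hypothetical function name, and
	       assuming NaNs and signed zeros need not be honored):

	         _Complex double rot90 (_Complex double z)
	         {
	           return z * __builtin_complex (0.0, 1.0);
	           // folded to __builtin_complex (-__imag__ z, __real__ z)
	         }
	    */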
10899 if (!HONOR_NANS (arg0)
10900 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10901 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10902 && TREE_CODE (arg1) == COMPLEX_CST
10903 && real_zerop (TREE_REALPART (arg1)))
10904 {
10905 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10906 if (real_onep (TREE_IMAGPART (arg1)))
10907 return
10908 fold_build2_loc (loc, COMPLEX_EXPR, type,
10909 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10910 rtype, arg0)),
10911 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10912 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10913 return
10914 fold_build2_loc (loc, COMPLEX_EXPR, type,
10915 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10916 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10917 rtype, arg0)));
10918 }
10919
10920 /* Optimize z * conj(z) for floating point complex numbers.
10921 Guarded by flag_unsafe_math_optimizations as non-finite
10922 imaginary components don't produce scalar results. */
10923 if (flag_unsafe_math_optimizations
10924 && TREE_CODE (arg0) == CONJ_EXPR
10925 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10926 return fold_mult_zconjz (loc, type, arg1);
10927 if (flag_unsafe_math_optimizations
10928 && TREE_CODE (arg1) == CONJ_EXPR
10929 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10930 return fold_mult_zconjz (loc, type, arg0);
10931 }
10932 goto associate;
10933
10934 case BIT_IOR_EXPR:
10935 /* Canonicalize (X & C1) | C2. */
10936 if (TREE_CODE (arg0) == BIT_AND_EXPR
10937 && TREE_CODE (arg1) == INTEGER_CST
10938 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10939 {
10940 int width = TYPE_PRECISION (type), w;
10941 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10942 wide_int c2 = wi::to_wide (arg1);
10943
10944 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10945 if ((c1 & c2) == c1)
10946 return omit_one_operand_loc (loc, type, arg1,
10947 TREE_OPERAND (arg0, 0));
10948
10949 wide_int msk = wi::mask (width, false,
10950 TYPE_PRECISION (TREE_TYPE (arg1)));
10951
10952 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10953 if (wi::bit_and_not (msk, c1 | c2) == 0)
10954 {
10955 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10956 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10957 }
10958
10959 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10960 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10961 mode which allows further optimizations. */
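	    /* Editor's illustration (hedged), for a 32-bit unsigned int x:
	         (x & 0xf0f0) | 0x0030      becomes  (x & 0xf0c0) | 0x0030
	       since bits already forced on by C2 can be dropped from C1, while
	         (x & 0x000f) | 0x00ff      becomes  0x00ff     (C1 & C2 == C1)
	         (x & 0xffffff00) | 0xff    becomes  x | 0xff   (C1 | C2 == ~0)
	       unless, as the loop below checks, keeping extra bits would turn
	       C1 into a mask that later folds can exploit.  */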
10962 c1 &= msk;
10963 c2 &= msk;
10964 wide_int c3 = wi::bit_and_not (c1, c2);
10965 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10966 {
10967 wide_int mask = wi::mask (w, false,
10968 TYPE_PRECISION (type));
10969 if (((c1 | c2) & mask) == mask
10970 && wi::bit_and_not (c1, mask) == 0)
10971 {
10972 c3 = mask;
10973 break;
10974 }
10975 }
10976
10977 if (c3 != c1)
10978 {
10979 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10980 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10981 wide_int_to_tree (type, c3));
10982 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10983 }
10984 }
10985
10986 /* See if this can be simplified into a rotate first. If that
10987 is unsuccessful continue in the association code. */
10988 goto bit_rotate;
10989
10990 case BIT_XOR_EXPR:
10991 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10992 if (TREE_CODE (arg0) == BIT_AND_EXPR
10993 && INTEGRAL_TYPE_P (type)
10994 && integer_onep (TREE_OPERAND (arg0, 1))
10995 && integer_onep (arg1))
10996 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10997 build_zero_cst (TREE_TYPE (arg0)));
10998
10999 /* See if this can be simplified into a rotate first. If that
11000 is unsuccessful continue in the association code. */
11001 goto bit_rotate;
11002
11003 case BIT_AND_EXPR:
11004 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11005 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11006 && INTEGRAL_TYPE_P (type)
11007 && integer_onep (TREE_OPERAND (arg0, 1))
11008 && integer_onep (arg1))
11009 {
11010 tree tem2;
11011 tem = TREE_OPERAND (arg0, 0);
11012 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11013 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11014 tem, tem2);
11015 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11016 build_zero_cst (TREE_TYPE (tem)));
11017 }
11018 /* Fold ~X & 1 as (X & 1) == 0. */
11019 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11020 && INTEGRAL_TYPE_P (type)
11021 && integer_onep (arg1))
11022 {
11023 tree tem2;
11024 tem = TREE_OPERAND (arg0, 0);
11025 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11026 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11027 tem, tem2);
11028 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11029 build_zero_cst (TREE_TYPE (tem)));
11030 }
11031 /* Fold !X & 1 as X == 0. */
11032 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11033 && integer_onep (arg1))
11034 {
11035 tem = TREE_OPERAND (arg0, 0);
11036 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11037 build_zero_cst (TREE_TYPE (tem)));
11038 }
11039
11040 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11041 multiple of 1 << CST. */
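      /* Editor's illustration (hedged): with CST == 2, -(1 << 2) is the
	 mask ~3, so for example
	   (x * 12) & -4   becomes   x * 12
	 because 12 is a multiple of 4 and the product therefore already
	 has its two low bits clear.  */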
11042 if (TREE_CODE (arg1) == INTEGER_CST)
11043 {
11044 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11045 wide_int ncst1 = -cst1;
11046 if ((cst1 & ncst1) == ncst1
11047 && multiple_of_p (type, arg0,
11048 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11049 return fold_convert_loc (loc, type, arg0);
11050 }
11051
11052 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11053 bits from CST2. */
11054 if (TREE_CODE (arg1) == INTEGER_CST
11055 && TREE_CODE (arg0) == MULT_EXPR
11056 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11057 {
11058 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11059 wide_int masked
11060 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11061
11062 if (masked == 0)
11063 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11064 arg0, arg1);
11065 else if (masked != warg1)
11066 {
11067 /* Avoid the transform if arg1 is a mask of some
11068 mode which allows further optimizations. */
11069 int pop = wi::popcount (warg1);
11070 if (!(pop >= BITS_PER_UNIT
11071 && pow2p_hwi (pop)
11072 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11073 return fold_build2_loc (loc, code, type, op0,
11074 wide_int_to_tree (type, masked));
11075 }
11076 }
11077
11078 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11079 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11080 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11081 {
11082 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11083
11084 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11085 if (mask == -1)
11086 return
11087 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11088 }
11089
11090 goto associate;
11091
11092 case RDIV_EXPR:
11093 /* Don't touch a floating-point divide by zero unless the mode
11094 of the constant can represent infinity. */
11095 if (TREE_CODE (arg1) == REAL_CST
11096 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11097 && real_zerop (arg1))
11098 return NULL_TREE;
11099
11100 /* (-A) / (-B) -> A / B */
11101 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11102 return fold_build2_loc (loc, RDIV_EXPR, type,
11103 TREE_OPERAND (arg0, 0),
11104 negate_expr (arg1));
11105 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11106 return fold_build2_loc (loc, RDIV_EXPR, type,
11107 negate_expr (arg0),
11108 TREE_OPERAND (arg1, 0));
11109 return NULL_TREE;
11110
11111 case TRUNC_DIV_EXPR:
11112 /* Fall through */
11113
11114 case FLOOR_DIV_EXPR:
11115 /* Simplify A / (B << N) where A and B are positive and B is
11116 a power of 2, to A >> (N + log2(B)). */
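      /* Editor's illustration (hedged): a sketch of the transform with
	 B == 4, using a hypothetical function name:

	   unsigned int f (unsigned int a, int n)
	   {
	     return a / (4u << n);      // folded to a >> (n + 2)
	   }

	 log2(4) == 2, so the division becomes a single right shift.  */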
11117 strict_overflow_p = false;
11118 if (TREE_CODE (arg1) == LSHIFT_EXPR
11119 && (TYPE_UNSIGNED (type)
11120 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11121 {
11122 tree sval = TREE_OPERAND (arg1, 0);
11123 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11124 {
11125 tree sh_cnt = TREE_OPERAND (arg1, 1);
11126 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11127 wi::exact_log2 (wi::to_wide (sval)));
11128
11129 if (strict_overflow_p)
11130 fold_overflow_warning (("assuming signed overflow does not "
11131 "occur when simplifying A / (B << N)"),
11132 WARN_STRICT_OVERFLOW_MISC);
11133
11134 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11135 sh_cnt, pow2);
11136 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11137 fold_convert_loc (loc, type, arg0), sh_cnt);
11138 }
11139 }
11140
11141 /* Fall through */
11142
11143 case ROUND_DIV_EXPR:
11144 case CEIL_DIV_EXPR:
11145 case EXACT_DIV_EXPR:
11146 if (integer_zerop (arg1))
11147 return NULL_TREE;
11148
11149 /* Convert -A / -B to A / B when the type is signed and overflow is
11150 undefined. */
11151 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11152 && TREE_CODE (op0) == NEGATE_EXPR
11153 && negate_expr_p (op1))
11154 {
11155 if (ANY_INTEGRAL_TYPE_P (type))
11156 fold_overflow_warning (("assuming signed overflow does not occur "
11157 "when distributing negation across "
11158 "division"),
11159 WARN_STRICT_OVERFLOW_MISC);
11160 return fold_build2_loc (loc, code, type,
11161 fold_convert_loc (loc, type,
11162 TREE_OPERAND (arg0, 0)),
11163 negate_expr (op1));
11164 }
11165 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11166 && TREE_CODE (arg1) == NEGATE_EXPR
11167 && negate_expr_p (op0))
11168 {
11169 if (ANY_INTEGRAL_TYPE_P (type))
11170 fold_overflow_warning (("assuming signed overflow does not occur "
11171 "when distributing negation across "
11172 "division"),
11173 WARN_STRICT_OVERFLOW_MISC);
11174 return fold_build2_loc (loc, code, type,
11175 negate_expr (op0),
11176 fold_convert_loc (loc, type,
11177 TREE_OPERAND (arg1, 0)));
11178 }
11179
11180 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11181 operation, EXACT_DIV_EXPR.
11182
11183 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11184          At one time others generated faster code; it's not clear whether they
11185          still do after the last round of changes to the DIV code in expmed.c.  */
11186 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11187 && multiple_of_p (type, arg0, arg1))
11188 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11189 fold_convert (type, arg0),
11190 fold_convert (type, arg1));
11191
11192 strict_overflow_p = false;
11193 if (TREE_CODE (arg1) == INTEGER_CST
11194 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11195 &strict_overflow_p)) != 0)
11196 {
11197 if (strict_overflow_p)
11198 fold_overflow_warning (("assuming signed overflow does not occur "
11199 "when simplifying division"),
11200 WARN_STRICT_OVERFLOW_MISC);
11201 return fold_convert_loc (loc, type, tem);
11202 }
11203
11204 return NULL_TREE;
11205
11206 case CEIL_MOD_EXPR:
11207 case FLOOR_MOD_EXPR:
11208 case ROUND_MOD_EXPR:
11209 case TRUNC_MOD_EXPR:
11210 strict_overflow_p = false;
11211 if (TREE_CODE (arg1) == INTEGER_CST
11212 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11213 &strict_overflow_p)) != 0)
11214 {
11215 if (strict_overflow_p)
11216 fold_overflow_warning (("assuming signed overflow does not occur "
11217 "when simplifying modulus"),
11218 WARN_STRICT_OVERFLOW_MISC);
11219 return fold_convert_loc (loc, type, tem);
11220 }
11221
11222 return NULL_TREE;
11223
11224 case LROTATE_EXPR:
11225 case RROTATE_EXPR:
11226 case RSHIFT_EXPR:
11227 case LSHIFT_EXPR:
11228       /* Since a negative shift count is not well-defined,
11229 don't try to compute it in the compiler. */
11230 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11231 return NULL_TREE;
11232
11233 prec = element_precision (type);
11234
11235 /* If we have a rotate of a bit operation with the rotate count and
11236 the second operand of the bit operation both constant,
11237 permute the two operations. */
11238 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11239 && (TREE_CODE (arg0) == BIT_AND_EXPR
11240 || TREE_CODE (arg0) == BIT_IOR_EXPR
11241 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11242 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11243 {
11244 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11245 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11246 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11247 fold_build2_loc (loc, code, type,
11248 arg00, arg1),
11249 fold_build2_loc (loc, code, type,
11250 arg01, arg1));
11251 }
11252
11253       /* Two consecutive rotates adding up to some integer
11254 multiple of the precision of the type can be ignored. */
11255 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11256 && TREE_CODE (arg0) == RROTATE_EXPR
11257 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11258 && wi::umod_trunc (wi::to_wide (arg1)
11259 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11260 prec) == 0)
11261 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11262
11263 return NULL_TREE;
11264
11265 case MIN_EXPR:
11266 case MAX_EXPR:
11267 goto associate;
11268
11269 case TRUTH_ANDIF_EXPR:
11270 /* Note that the operands of this must be ints
11271 and their values must be 0 or 1.
11272 ("true" is a fixed value perhaps depending on the language.) */
11273 /* If first arg is constant zero, return it. */
11274 if (integer_zerop (arg0))
11275 return fold_convert_loc (loc, type, arg0);
11276 /* FALLTHRU */
11277 case TRUTH_AND_EXPR:
11278 /* If either arg is constant true, drop it. */
11279 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11280 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11281 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11282 /* Preserve sequence points. */
11283 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11284 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11285 /* If second arg is constant zero, result is zero, but first arg
11286 must be evaluated. */
11287 if (integer_zerop (arg1))
11288 return omit_one_operand_loc (loc, type, arg1, arg0);
11289 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11290 case will be handled here. */
11291 if (integer_zerop (arg0))
11292 return omit_one_operand_loc (loc, type, arg0, arg1);
11293
11294 /* !X && X is always false. */
11295 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11296 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11297 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11298 /* X && !X is always false. */
11299 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11300 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11301 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11302
11303 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11304 means A >= Y && A != MAX, but in this case we know that
11305 A < X <= MAX. */
11306
11307 if (!TREE_SIDE_EFFECTS (arg0)
11308 && !TREE_SIDE_EFFECTS (arg1))
11309 {
11310 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11311 if (tem && !operand_equal_p (tem, arg0, 0))
11312 return fold_build2_loc (loc, code, type, tem, arg1);
11313
11314 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11315 if (tem && !operand_equal_p (tem, arg1, 0))
11316 return fold_build2_loc (loc, code, type, arg0, tem);
11317 }
11318
11319 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11320 != NULL_TREE)
11321 return tem;
11322
11323 return NULL_TREE;
11324
11325 case TRUTH_ORIF_EXPR:
11326 /* Note that the operands of this must be ints
11327 and their values must be 0 or true.
11328 ("true" is a fixed value perhaps depending on the language.) */
11329 /* If first arg is constant true, return it. */
11330 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11331 return fold_convert_loc (loc, type, arg0);
11332 /* FALLTHRU */
11333 case TRUTH_OR_EXPR:
11334 /* If either arg is constant zero, drop it. */
11335 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11336 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11337 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11338 /* Preserve sequence points. */
11339 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11340 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11341 /* If second arg is constant true, result is true, but we must
11342 evaluate first arg. */
11343 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11344 return omit_one_operand_loc (loc, type, arg1, arg0);
11345 /* Likewise for first arg, but note this only occurs here for
11346 TRUTH_OR_EXPR. */
11347 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11348 return omit_one_operand_loc (loc, type, arg0, arg1);
11349
11350 /* !X || X is always true. */
11351 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11352 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11353 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11354 /* X || !X is always true. */
11355 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11356 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11357 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11358
11359 /* (X && !Y) || (!X && Y) is X ^ Y */
11360 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11361 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11362 {
11363 tree a0, a1, l0, l1, n0, n1;
11364
11365 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11366 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11367
11368 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11369 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11370
11371 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11372 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11373
11374 if ((operand_equal_p (n0, a0, 0)
11375 && operand_equal_p (n1, a1, 0))
11376 || (operand_equal_p (n0, a1, 0)
11377 && operand_equal_p (n1, a0, 0)))
11378 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11379 }
11380
11381 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11382 != NULL_TREE)
11383 return tem;
11384
11385 return NULL_TREE;
11386
11387 case TRUTH_XOR_EXPR:
11388 /* If the second arg is constant zero, drop it. */
11389 if (integer_zerop (arg1))
11390 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11391 /* If the second arg is constant true, this is a logical inversion. */
11392 if (integer_onep (arg1))
11393 {
11394 tem = invert_truthvalue_loc (loc, arg0);
11395 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11396 }
11397 /* Identical arguments cancel to zero. */
11398 if (operand_equal_p (arg0, arg1, 0))
11399 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11400
11401 /* !X ^ X is always true. */
11402 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11403 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11404 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11405
11406 /* X ^ !X is always true. */
11407 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11408 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11409 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11410
11411 return NULL_TREE;
11412
11413 case EQ_EXPR:
11414 case NE_EXPR:
11415 STRIP_NOPS (arg0);
11416 STRIP_NOPS (arg1);
11417
11418 tem = fold_comparison (loc, code, type, op0, op1);
11419 if (tem != NULL_TREE)
11420 return tem;
11421
11422 /* bool_var != 1 becomes !bool_var. */
11423 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11424 && code == NE_EXPR)
11425 return fold_convert_loc (loc, type,
11426 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11427 TREE_TYPE (arg0), arg0));
11428
11429 /* bool_var == 0 becomes !bool_var. */
11430 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11431 && code == EQ_EXPR)
11432 return fold_convert_loc (loc, type,
11433 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11434 TREE_TYPE (arg0), arg0));
11435
11436 /* !exp != 0 becomes !exp */
11437 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11438 && code == NE_EXPR)
11439 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11440
11441 /* If this is an EQ or NE comparison with zero and ARG0 is
11442 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11443 two operations, but the latter can be done in one less insn
11444 on machines that have only two-operand insns or on which a
11445 constant cannot be the first operand. */
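      /* Editor's illustration (hedged): a typical flag test such as
	   ((1 << bit) & flags) != 0
	 is rewritten here as
	   ((flags >> bit) & 1) != 0
	 which needs no separately materialized constant 1 << bit.  */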
11446 if (TREE_CODE (arg0) == BIT_AND_EXPR
11447 && integer_zerop (arg1))
11448 {
11449 tree arg00 = TREE_OPERAND (arg0, 0);
11450 tree arg01 = TREE_OPERAND (arg0, 1);
11451 if (TREE_CODE (arg00) == LSHIFT_EXPR
11452 && integer_onep (TREE_OPERAND (arg00, 0)))
11453 {
11454 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11455 arg01, TREE_OPERAND (arg00, 1));
11456 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11457 build_int_cst (TREE_TYPE (arg0), 1));
11458 return fold_build2_loc (loc, code, type,
11459 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11460 arg1);
11461 }
11462 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11463 && integer_onep (TREE_OPERAND (arg01, 0)))
11464 {
11465 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11466 arg00, TREE_OPERAND (arg01, 1));
11467 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11468 build_int_cst (TREE_TYPE (arg0), 1));
11469 return fold_build2_loc (loc, code, type,
11470 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11471 arg1);
11472 }
11473 }
11474
11475 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11476 C1 is a valid shift constant, and C2 is a power of two, i.e.
11477 a single bit. */
11478 if (TREE_CODE (arg0) == BIT_AND_EXPR
11479 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11480 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11481 == INTEGER_CST
11482 && integer_pow2p (TREE_OPERAND (arg0, 1))
11483 && integer_zerop (arg1))
11484 {
11485 tree itype = TREE_TYPE (arg0);
11486 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11487 prec = TYPE_PRECISION (itype);
11488
11489 /* Check for a valid shift count. */
11490 if (wi::ltu_p (wi::to_wide (arg001), prec))
11491 {
11492 tree arg01 = TREE_OPERAND (arg0, 1);
11493 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11494 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11495 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11496 can be rewritten as (X & (C2 << C1)) != 0. */
11497 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11498 {
11499 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11500 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11501 return fold_build2_loc (loc, code, type, tem,
11502 fold_convert_loc (loc, itype, arg1));
11503 }
11504 /* Otherwise, for signed (arithmetic) shifts,
11505 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11506 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11507 else if (!TYPE_UNSIGNED (itype))
11508 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11509 arg000, build_int_cst (itype, 0));
11510           /* Otherwise, for unsigned (logical) shifts,
11511 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11512 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11513 else
11514 return omit_one_operand_loc (loc, type,
11515 code == EQ_EXPR ? integer_one_node
11516 : integer_zero_node,
11517 arg000);
11518 }
11519 }
11520
11521 /* If this is a comparison of a field, we may be able to simplify it. */
11522 if ((TREE_CODE (arg0) == COMPONENT_REF
11523 || TREE_CODE (arg0) == BIT_FIELD_REF)
11524 /* Handle the constant case even without -O
11525 to make sure the warnings are given. */
11526 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11527 {
11528 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11529 if (t1)
11530 return t1;
11531 }
11532
11533 /* Optimize comparisons of strlen vs zero to a compare of the
11534 first character of the string vs zero. To wit,
11535 strlen(ptr) == 0 => *ptr == 0
11536 strlen(ptr) != 0 => *ptr != 0
11537 Other cases should reduce to one of these two (or a constant)
11538 due to the return value of strlen being unsigned. */
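      /* Editor's illustration (hedged): a sketch of the strlen fold, with
	 a hypothetical function name:

	   #include <string.h>

	   int is_empty (const char *s)
	   {
	     return strlen (s) == 0;    // folded to *s == 0
	   }
      */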
11539 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
11540 {
11541 tree fndecl = get_callee_fndecl (arg0);
11542
11543 if (fndecl
11544 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
11545 && call_expr_nargs (arg0) == 1
11546 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
11547 == POINTER_TYPE))
11548 {
11549 tree ptrtype
11550 = build_pointer_type (build_qualified_type (char_type_node,
11551 TYPE_QUAL_CONST));
11552 tree ptr = fold_convert_loc (loc, ptrtype,
11553 CALL_EXPR_ARG (arg0, 0));
11554 tree iref = build_fold_indirect_ref_loc (loc, ptr);
11555 return fold_build2_loc (loc, code, type, iref,
11556 build_int_cst (TREE_TYPE (iref), 0));
11557 }
11558 }
11559
11560 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11561 of X. Similarly fold (X >> C) == 0 into X >= 0. */
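      /* Editor's illustration (hedged): for a 32-bit int x,
	   (x >> 31) != 0   becomes   x < 0
	   (x >> 31) == 0   becomes   x >= 0
	 and for an unsigned x the operand is first converted to the
	 corresponding signed type, as the code below does.  */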
11562 if (TREE_CODE (arg0) == RSHIFT_EXPR
11563 && integer_zerop (arg1)
11564 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11565 {
11566 tree arg00 = TREE_OPERAND (arg0, 0);
11567 tree arg01 = TREE_OPERAND (arg0, 1);
11568 tree itype = TREE_TYPE (arg00);
11569 if (wi::to_wide (arg01) == element_precision (itype) - 1)
11570 {
11571 if (TYPE_UNSIGNED (itype))
11572 {
11573 itype = signed_type_for (itype);
11574 arg00 = fold_convert_loc (loc, itype, arg00);
11575 }
11576 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11577 type, arg00, build_zero_cst (itype));
11578 }
11579 }
11580
11581 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11582 (X & C) == 0 when C is a single bit. */
11583 if (TREE_CODE (arg0) == BIT_AND_EXPR
11584 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11585 && integer_zerop (arg1)
11586 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11587 {
11588 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11589 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11590 TREE_OPERAND (arg0, 1));
11591 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11592 type, tem,
11593 fold_convert_loc (loc, TREE_TYPE (arg0),
11594 arg1));
11595 }
11596
11597 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11598 constant C is a power of two, i.e. a single bit. */
11599 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11600 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11601 && integer_zerop (arg1)
11602 && integer_pow2p (TREE_OPERAND (arg0, 1))
11603 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11604 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11605 {
11606 tree arg00 = TREE_OPERAND (arg0, 0);
11607 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11608 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11609 }
11610
11611 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11612          when C is a power of two, i.e. a single bit.  */
11613 if (TREE_CODE (arg0) == BIT_AND_EXPR
11614 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11615 && integer_zerop (arg1)
11616 && integer_pow2p (TREE_OPERAND (arg0, 1))
11617 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11618 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11619 {
11620 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11621 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11622 arg000, TREE_OPERAND (arg0, 1));
11623 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11624 tem, build_int_cst (TREE_TYPE (tem), 0));
11625 }
11626
11627 if (integer_zerop (arg1)
11628 && tree_expr_nonzero_p (arg0))
11629 {
11630 tree res = constant_boolean_node (code==NE_EXPR, type);
11631 return omit_one_operand_loc (loc, type, res, arg0);
11632 }
11633
11634 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11635 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11636 {
11637 tree arg00 = TREE_OPERAND (arg0, 0);
11638 tree arg01 = TREE_OPERAND (arg0, 1);
11639 tree arg10 = TREE_OPERAND (arg1, 0);
11640 tree arg11 = TREE_OPERAND (arg1, 1);
11641 tree itype = TREE_TYPE (arg0);
11642
11643 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11644 operand_equal_p guarantees no side-effects so we don't need
11645 to use omit_one_operand on Z. */
11646 if (operand_equal_p (arg01, arg11, 0))
11647 return fold_build2_loc (loc, code, type, arg00,
11648 fold_convert_loc (loc, TREE_TYPE (arg00),
11649 arg10));
11650 if (operand_equal_p (arg01, arg10, 0))
11651 return fold_build2_loc (loc, code, type, arg00,
11652 fold_convert_loc (loc, TREE_TYPE (arg00),
11653 arg11));
11654 if (operand_equal_p (arg00, arg11, 0))
11655 return fold_build2_loc (loc, code, type, arg01,
11656 fold_convert_loc (loc, TREE_TYPE (arg01),
11657 arg10));
11658 if (operand_equal_p (arg00, arg10, 0))
11659 return fold_build2_loc (loc, code, type, arg01,
11660 fold_convert_loc (loc, TREE_TYPE (arg01),
11661 arg11));
11662
11663 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11664 if (TREE_CODE (arg01) == INTEGER_CST
11665 && TREE_CODE (arg11) == INTEGER_CST)
11666 {
11667 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11668 fold_convert_loc (loc, itype, arg11));
11669 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11670 return fold_build2_loc (loc, code, type, tem,
11671 fold_convert_loc (loc, itype, arg10));
11672 }
11673 }
11674
11675 /* Attempt to simplify equality/inequality comparisons of complex
11676 values. Only lower the comparison if the result is known or
11677 can be simplified to a single scalar comparison. */
11678 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11679 || TREE_CODE (arg0) == COMPLEX_CST)
11680 && (TREE_CODE (arg1) == COMPLEX_EXPR
11681 || TREE_CODE (arg1) == COMPLEX_CST))
11682 {
11683 tree real0, imag0, real1, imag1;
11684 tree rcond, icond;
11685
11686 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11687 {
11688 real0 = TREE_OPERAND (arg0, 0);
11689 imag0 = TREE_OPERAND (arg0, 1);
11690 }
11691 else
11692 {
11693 real0 = TREE_REALPART (arg0);
11694 imag0 = TREE_IMAGPART (arg0);
11695 }
11696
11697 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11698 {
11699 real1 = TREE_OPERAND (arg1, 0);
11700 imag1 = TREE_OPERAND (arg1, 1);
11701 }
11702 else
11703 {
11704 real1 = TREE_REALPART (arg1);
11705 imag1 = TREE_IMAGPART (arg1);
11706 }
11707
11708 rcond = fold_binary_loc (loc, code, type, real0, real1);
11709 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11710 {
11711 if (integer_zerop (rcond))
11712 {
11713 if (code == EQ_EXPR)
11714 return omit_two_operands_loc (loc, type, boolean_false_node,
11715 imag0, imag1);
11716 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11717 }
11718 else
11719 {
11720 if (code == NE_EXPR)
11721 return omit_two_operands_loc (loc, type, boolean_true_node,
11722 imag0, imag1);
11723 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11724 }
11725 }
11726
11727 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11728 if (icond && TREE_CODE (icond) == INTEGER_CST)
11729 {
11730 if (integer_zerop (icond))
11731 {
11732 if (code == EQ_EXPR)
11733 return omit_two_operands_loc (loc, type, boolean_false_node,
11734 real0, real1);
11735 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11736 }
11737 else
11738 {
11739 if (code == NE_EXPR)
11740 return omit_two_operands_loc (loc, type, boolean_true_node,
11741 real0, real1);
11742 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11743 }
11744 }
11745 }
11746
11747 return NULL_TREE;
11748
11749 case LT_EXPR:
11750 case GT_EXPR:
11751 case LE_EXPR:
11752 case GE_EXPR:
11753 tem = fold_comparison (loc, code, type, op0, op1);
11754 if (tem != NULL_TREE)
11755 return tem;
11756
11757 /* Transform comparisons of the form X +- C CMP X. */
11758 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11759 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11760 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11761 && !HONOR_SNANS (arg0))
11762 {
11763 tree arg01 = TREE_OPERAND (arg0, 1);
11764 enum tree_code code0 = TREE_CODE (arg0);
11765 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11766
11767 /* (X - c) > X becomes false. */
11768 if (code == GT_EXPR
11769 && ((code0 == MINUS_EXPR && is_positive >= 0)
11770 || (code0 == PLUS_EXPR && is_positive <= 0)))
11771 return constant_boolean_node (0, type);
11772
11773 /* Likewise (X + c) < X becomes false. */
11774 if (code == LT_EXPR
11775 && ((code0 == PLUS_EXPR && is_positive >= 0)
11776 || (code0 == MINUS_EXPR && is_positive <= 0)))
11777 return constant_boolean_node (0, type);
11778
11779 /* Convert (X - c) <= X to true. */
11780 if (!HONOR_NANS (arg1)
11781 && code == LE_EXPR
11782 && ((code0 == MINUS_EXPR && is_positive >= 0)
11783 || (code0 == PLUS_EXPR && is_positive <= 0)))
11784 return constant_boolean_node (1, type);
11785
11786 /* Convert (X + c) >= X to true. */
11787 if (!HONOR_NANS (arg1)
11788 && code == GE_EXPR
11789 && ((code0 == PLUS_EXPR && is_positive >= 0)
11790 || (code0 == MINUS_EXPR && is_positive <= 0)))
11791 return constant_boolean_node (1, type);
11792 }
11793
11794 /* If we are comparing an ABS_EXPR with a constant, we can
11795 convert all the cases into explicit comparisons, but they may
11796 well not be faster than doing the ABS and one comparison.
11797 But ABS (X) <= C is a range comparison, which becomes a subtraction
11798 and a comparison, and is probably faster. */
11799 if (code == LE_EXPR
11800 && TREE_CODE (arg1) == INTEGER_CST
11801 && TREE_CODE (arg0) == ABS_EXPR
11802 && ! TREE_SIDE_EFFECTS (arg0)
11803 && (tem = negate_expr (arg1)) != 0
11804 && TREE_CODE (tem) == INTEGER_CST
11805 && !TREE_OVERFLOW (tem))
11806 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11807 build2 (GE_EXPR, type,
11808 TREE_OPERAND (arg0, 0), tem),
11809 build2 (LE_EXPR, type,
11810 TREE_OPERAND (arg0, 0), arg1));
11811
11812 /* Convert ABS_EXPR<x> >= 0 to true. */
11813 strict_overflow_p = false;
11814 if (code == GE_EXPR
11815 && (integer_zerop (arg1)
11816 || (! HONOR_NANS (arg0)
11817 && real_zerop (arg1)))
11818 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11819 {
11820 if (strict_overflow_p)
11821 fold_overflow_warning (("assuming signed overflow does not occur "
11822 "when simplifying comparison of "
11823 "absolute value and zero"),
11824 WARN_STRICT_OVERFLOW_CONDITIONAL);
11825 return omit_one_operand_loc (loc, type,
11826 constant_boolean_node (true, type),
11827 arg0);
11828 }
11829
11830 /* Convert ABS_EXPR<x> < 0 to false. */
11831 strict_overflow_p = false;
11832 if (code == LT_EXPR
11833 && (integer_zerop (arg1) || real_zerop (arg1))
11834 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11835 {
11836 if (strict_overflow_p)
11837 fold_overflow_warning (("assuming signed overflow does not occur "
11838 "when simplifying comparison of "
11839 "absolute value and zero"),
11840 WARN_STRICT_OVERFLOW_CONDITIONAL);
11841 return omit_one_operand_loc (loc, type,
11842 constant_boolean_node (false, type),
11843 arg0);
11844 }
11845
11846 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11847 and similarly for >= into !=. */
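      /* Editor's illustration (hedged): for unsigned x and y,
	   x < (1u << y)    becomes   (x >> y) == 0
	   x >= (1u << y)   becomes   (x >> y) != 0
	 which avoids materializing the power of two.  */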
11848 if ((code == LT_EXPR || code == GE_EXPR)
11849 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11850 && TREE_CODE (arg1) == LSHIFT_EXPR
11851 && integer_onep (TREE_OPERAND (arg1, 0)))
11852 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11853 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11854 TREE_OPERAND (arg1, 1)),
11855 build_zero_cst (TREE_TYPE (arg0)));
11856
11857 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11858 otherwise Y might be >= # of bits in X's type and thus e.g.
11859 (unsigned char) (1 << Y) for Y 15 might be 0.
11860 If the cast is widening, then 1 << Y should have unsigned type,
11861 otherwise if Y is number of bits in the signed shift type minus 1,
11862 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11863 31 might be 0xffffffff80000000. */
11864 if ((code == LT_EXPR || code == GE_EXPR)
11865 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11866 && CONVERT_EXPR_P (arg1)
11867 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11868 && (element_precision (TREE_TYPE (arg1))
11869 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11870 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11871 || (element_precision (TREE_TYPE (arg1))
11872 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11873 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11874 {
11875 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11876 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11877 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11878 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11879 build_zero_cst (TREE_TYPE (arg0)));
11880 }
11881
11882 return NULL_TREE;
11883
11884 case UNORDERED_EXPR:
11885 case ORDERED_EXPR:
11886 case UNLT_EXPR:
11887 case UNLE_EXPR:
11888 case UNGT_EXPR:
11889 case UNGE_EXPR:
11890 case UNEQ_EXPR:
11891 case LTGT_EXPR:
11892 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11893 {
11894 tree targ0 = strip_float_extensions (arg0);
11895 tree targ1 = strip_float_extensions (arg1);
11896 tree newtype = TREE_TYPE (targ0);
11897
11898 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11899 newtype = TREE_TYPE (targ1);
11900
11901 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11902 return fold_build2_loc (loc, code, type,
11903 fold_convert_loc (loc, newtype, targ0),
11904 fold_convert_loc (loc, newtype, targ1));
11905 }
11906
11907 return NULL_TREE;
11908
11909 case COMPOUND_EXPR:
11910 /* When pedantic, a compound expression can be neither an lvalue
11911 nor an integer constant expression. */
11912 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11913 return NULL_TREE;
11914       /* Don't let (0, 0) be a null pointer constant.  */
11915 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11916 : fold_convert_loc (loc, type, arg1);
11917 return pedantic_non_lvalue_loc (loc, tem);
11918
11919 case ASSERT_EXPR:
11920 /* An ASSERT_EXPR should never be passed to fold_binary. */
11921 gcc_unreachable ();
11922
11923 default:
11924 return NULL_TREE;
11925 } /* switch (code) */
11926 }
11927
11928 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11929 ((A & N) + B) & M -> (A + B) & M
11930 Similarly if (N & M) == 0,
11931 ((A | N) + B) & M -> (A + B) & M
11932 and for - instead of + (or unary - instead of +)
11933 and/or ^ instead of |.
11934 If B is constant and (B & M) == 0, fold into A & M.
11935
11936 This function is a helper for match.pd patterns. If any optimization
11937 is possible, return the non-NULL type in which the simplified operation
11938 should be performed; otherwise return NULL_TREE.
11939
11940 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
11941 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
11942 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
11943 +/-. */
11944 tree
11945 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
11946 tree arg00, enum tree_code code00, tree arg000, tree arg001,
11947 tree arg01, enum tree_code code01, tree arg010, tree arg011,
11948 tree *pmop)
11949 {
11950 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
11951 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
11952 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11953 if (~cst1 == 0
11954 || (cst1 & (cst1 + 1)) != 0
11955 || !INTEGRAL_TYPE_P (type)
11956 || (!TYPE_OVERFLOW_WRAPS (type)
11957 && TREE_CODE (type) != INTEGER_TYPE)
11958 || (wi::max_value (type) & cst1) != cst1)
11959 return NULL_TREE;
11960
11961 enum tree_code codes[2] = { code00, code01 };
11962 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
11963 int which = 0;
11964 wide_int cst0;
11965
11966 /* Now we know that arg0 is (C + D) or (C - D) or -C and
11967 arg1 (M) is == (1LL << cst) - 1.
11968 Store C into PMOP[0] and D into PMOP[1]. */
11969 pmop[0] = arg00;
11970 pmop[1] = arg01;
11971 which = code != NEGATE_EXPR;
11972
11973 for (; which >= 0; which--)
11974 switch (codes[which])
11975 {
11976 case BIT_AND_EXPR:
11977 case BIT_IOR_EXPR:
11978 case BIT_XOR_EXPR:
11979 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
11980 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
11981 if (codes[which] == BIT_AND_EXPR)
11982 {
11983 if (cst0 != cst1)
11984 break;
11985 }
11986 else if (cst0 != 0)
11987 break;
11988 /* If C or D is of the form (A & N) where
11989 (N & M) == M, or of the form (A | N) or
11990 (A ^ N) where (N & M) == 0, replace it with A. */
11991 pmop[which] = arg0xx[2 * which];
11992 break;
11993 case ERROR_MARK:
11994 if (TREE_CODE (pmop[which]) != INTEGER_CST)
11995 break;
11996 /* If C or D is a N where (N & M) == 0, it can be
11997 omitted (replaced with 0). */
11998 if ((code == PLUS_EXPR
11999 || (code == MINUS_EXPR && which == 0))
12000 && (cst1 & wi::to_wide (pmop[which])) == 0)
12001 pmop[which] = build_int_cst (type, 0);
12002 /* Similarly, with C - N where (-N & M) == 0. */
12003 if (code == MINUS_EXPR
12004 && which == 1
12005 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12006 pmop[which] = build_int_cst (type, 0);
12007 break;
12008 default:
12009 gcc_unreachable ();
12010 }
12011
12012 /* Only build anything new if we optimized one or both arguments above. */
12013 if (pmop[0] == arg00 && pmop[1] == arg01)
12014 return NULL_TREE;
12015
12016 if (TYPE_OVERFLOW_WRAPS (type))
12017 return type;
12018 else
12019 return unsigned_type_for (type);
12020 }
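/* A worked instance of the fold_bit_and_mask transformation above,
   assuming 32-bit arithmetic and M = 0xff (i.e. M == (1 << 8) - 1):
     N = 0x1ff, (N & M) == M:  ((A & 0x1ff) + B) & 0xff == (A + B) & 0xff
     N = 0x100, (N & M) == 0:  ((A | 0x100) + B) & 0xff == (A + B) & 0xff
   Both hold because the final & M keeps only the low 8 bits of the sum,
   which depend only on the low 8 bits of the addends, and those are left
   untouched by the inner & and |.  */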
12021
12022 /* Used by contains_label_p and contains_label_1. */
12023
12024 struct contains_label_data
12025 {
12026 hash_set<tree> *pset;
12027 bool inside_switch_p;
12028 };
12029
12030 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12031 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12032 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12033
12034 static tree
12035 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12036 {
12037 contains_label_data *d = (contains_label_data *) data;
12038 switch (TREE_CODE (*tp))
12039 {
12040 case LABEL_EXPR:
12041 return *tp;
12042
12043 case CASE_LABEL_EXPR:
12044 if (!d->inside_switch_p)
12045 return *tp;
12046 return NULL_TREE;
12047
12048 case SWITCH_EXPR:
12049 if (!d->inside_switch_p)
12050 {
12051 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12052 return *tp;
12053 d->inside_switch_p = true;
12054 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12055 return *tp;
12056 d->inside_switch_p = false;
12057 *walk_subtrees = 0;
12058 }
12059 return NULL_TREE;
12060
12061 case GOTO_EXPR:
12062 *walk_subtrees = 0;
12063 return NULL_TREE;
12064
12065 default:
12066 return NULL_TREE;
12067 }
12068 }
12069
12070 /* Return whether the sub-tree ST contains a label which is accessible from
12071 outside the sub-tree. */
12072
12073 static bool
12074 contains_label_p (tree st)
12075 {
12076 hash_set<tree> pset;
12077 contains_label_data data = { &pset, false };
12078 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12079 }
12080
12081 /* Fold a ternary expression of code CODE and type TYPE with operands
12082 OP0, OP1, and OP2. Return the folded expression if folding is
12083 successful. Otherwise, return NULL_TREE. */
12084
12085 tree
12086 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12087 tree op0, tree op1, tree op2)
12088 {
12089 tree tem;
12090 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12091 enum tree_code_class kind = TREE_CODE_CLASS (code);
12092
12093 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12094 && TREE_CODE_LENGTH (code) == 3);
12095
12096 /* If this is a commutative operation, and OP0 is a constant, move it
12097 to OP1 to reduce the number of tests below. */
12098 if (commutative_ternary_tree_code (code)
12099 && tree_swap_operands_p (op0, op1))
12100 return fold_build3_loc (loc, code, type, op1, op0, op2);
12101
12102 tem = generic_simplify (loc, code, type, op0, op1, op2);
12103 if (tem)
12104 return tem;
12105
12106 /* Strip any conversions that don't change the mode. This is safe
12107 for every expression, except for a comparison expression because
12108 its signedness is derived from its operands. So, in the latter
12109 case, only strip conversions that don't change the signedness.
12110
12111 Note that this is done as an internal manipulation within the
12112 constant folder, in order to find the simplest representation of
12113 the arguments so that their form can be studied. In any case,
12114 the appropriate type conversions should be put back in the tree
12115 that will get out of the constant folder. */
12116 if (op0)
12117 {
12118 arg0 = op0;
12119 STRIP_NOPS (arg0);
12120 }
12121
12122 if (op1)
12123 {
12124 arg1 = op1;
12125 STRIP_NOPS (arg1);
12126 }
12127
12128 if (op2)
12129 {
12130 arg2 = op2;
12131 STRIP_NOPS (arg2);
12132 }
12133
12134 switch (code)
12135 {
12136 case COMPONENT_REF:
12137 if (TREE_CODE (arg0) == CONSTRUCTOR
12138 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12139 {
12140 unsigned HOST_WIDE_INT idx;
12141 tree field, value;
12142 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12143 if (field == arg1)
12144 return value;
12145 }
12146 return NULL_TREE;
12147
12148 case COND_EXPR:
12149 case VEC_COND_EXPR:
12150 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12151 so all simple results must be passed through pedantic_non_lvalue. */
12152 if (TREE_CODE (arg0) == INTEGER_CST)
12153 {
12154 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12155 tem = integer_zerop (arg0) ? op2 : op1;
12156 /* Only optimize constant conditions when the selected branch
12157 has the same type as the COND_EXPR. This avoids optimizing
12158 away "c ? x : throw", where the throw has a void type.
12159 Avoid throwing away that operand which contains label. */
12160 if ((!TREE_SIDE_EFFECTS (unused_op)
12161 || !contains_label_p (unused_op))
12162 && (! VOID_TYPE_P (TREE_TYPE (tem))
12163 || VOID_TYPE_P (type)))
12164 return pedantic_non_lvalue_loc (loc, tem);
12165 return NULL_TREE;
12166 }
12167 else if (TREE_CODE (arg0) == VECTOR_CST)
12168 {
12169 unsigned HOST_WIDE_INT nelts;
12170 if ((TREE_CODE (arg1) == VECTOR_CST
12171 || TREE_CODE (arg1) == CONSTRUCTOR)
12172 && (TREE_CODE (arg2) == VECTOR_CST
12173 || TREE_CODE (arg2) == CONSTRUCTOR)
12174 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12175 {
12176 vec_perm_builder sel (nelts, nelts, 1);
12177 for (unsigned int i = 0; i < nelts; i++)
12178 {
12179 tree val = VECTOR_CST_ELT (arg0, i);
12180 if (integer_all_onesp (val))
12181 sel.quick_push (i);
12182 else if (integer_zerop (val))
12183 sel.quick_push (nelts + i);
12184 else /* Currently unreachable. */
12185 return NULL_TREE;
12186 }
12187 vec_perm_indices indices (sel, 2, nelts);
12188 tree t = fold_vec_perm (type, arg1, arg2, indices);
12189 if (t != NULL_TREE)
12190 return t;
12191 }
12192 }
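/* As an illustration, a 4-element VEC_COND_EXPR whose constant mask is
   { -1, 0, -1, 0 } selects { arg1[0], arg2[1], arg1[2], arg2[3] };
   the loop above encodes that as the permutation { 0, 5, 2, 7 } of the
   concatenation of arg1 and arg2 and hands it to fold_vec_perm.  */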
12193
12194 /* If we have A op B ? A : C, we may be able to convert this to a
12195 simpler expression, depending on the operation and the values
12196 of B and C. Signed zeros prevent all of these transformations,
12197 for reasons given above each one.
12198
12199 Also try swapping the arguments and inverting the conditional. */
12200 if (COMPARISON_CLASS_P (arg0)
12201 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12202 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
12203 {
12204 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12205 if (tem)
12206 return tem;
12207 }
12208
12209 if (COMPARISON_CLASS_P (arg0)
12210 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12211 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12212 {
12213 location_t loc0 = expr_location_or (arg0, loc);
12214 tem = fold_invert_truthvalue (loc0, arg0);
12215 if (tem && COMPARISON_CLASS_P (tem))
12216 {
12217 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12218 if (tem)
12219 return tem;
12220 }
12221 }
12222
12223 /* If the second operand is simpler than the third, swap them
12224 since that produces better jump optimization results. */
12225 if (truth_value_p (TREE_CODE (arg0))
12226 && tree_swap_operands_p (op1, op2))
12227 {
12228 location_t loc0 = expr_location_or (arg0, loc);
12229 /* See if this can be inverted. If it can't, possibly because
12230 it was a floating-point inequality comparison, don't do
12231 anything. */
12232 tem = fold_invert_truthvalue (loc0, arg0);
12233 if (tem)
12234 return fold_build3_loc (loc, code, type, tem, op2, op1);
12235 }
12236
12237 /* Convert A ? 1 : 0 to simply A. */
12238 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12239 : (integer_onep (op1)
12240 && !VECTOR_TYPE_P (type)))
12241 && integer_zerop (op2)
12242 /* If we try to convert OP0 to our type, the
12243 call to fold will try to move the conversion inside
12244 a COND, which will recurse. In that case, the COND_EXPR
12245 is probably the best choice, so leave it alone. */
12246 && type == TREE_TYPE (arg0))
12247 return pedantic_non_lvalue_loc (loc, arg0);
12248
12249 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12250 over COND_EXPR in cases such as floating point comparisons. */
12251 if (integer_zerop (op1)
12252 && code == COND_EXPR
12253 && integer_onep (op2)
12254 && !VECTOR_TYPE_P (type)
12255 && truth_value_p (TREE_CODE (arg0)))
12256 return pedantic_non_lvalue_loc (loc,
12257 fold_convert_loc (loc, type,
12258 invert_truthvalue_loc (loc,
12259 arg0)));
12260
12261 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
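/* For instance, with a plain 32-bit signed A the fold below turns
     A < 0 ? 0x80000000 : 0
   into A & 0x80000000; both forms yield A's sign bit and nothing else.
   The checks below additionally cope with conversions and with
   <sign bit of A> having a wider type than A itself.  */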
12262 if (TREE_CODE (arg0) == LT_EXPR
12263 && integer_zerop (TREE_OPERAND (arg0, 1))
12264 && integer_zerop (op2)
12265 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12266 {
12267 /* sign_bit_p looks through both zero and sign extensions,
12268 but for this optimization only sign extensions are
12269 usable. */
12270 tree tem2 = TREE_OPERAND (arg0, 0);
12271 while (tem != tem2)
12272 {
12273 if (TREE_CODE (tem2) != NOP_EXPR
12274 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12275 {
12276 tem = NULL_TREE;
12277 break;
12278 }
12279 tem2 = TREE_OPERAND (tem2, 0);
12280 }
12281 /* sign_bit_p only checks ARG1 bits within A's precision.
12282 If <sign bit of A> has wider type than A, bits outside
12283 of A's precision in <sign bit of A> need to be checked.
12284 If they are all 0, this optimization needs to be done
12285 in unsigned A's type; if they are all 1, in signed A's type;
12286 otherwise this can't be done. */
12287 if (tem
12288 && TYPE_PRECISION (TREE_TYPE (tem))
12289 < TYPE_PRECISION (TREE_TYPE (arg1))
12290 && TYPE_PRECISION (TREE_TYPE (tem))
12291 < TYPE_PRECISION (type))
12292 {
12293 int inner_width, outer_width;
12294 tree tem_type;
12295
12296 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12297 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12298 if (outer_width > TYPE_PRECISION (type))
12299 outer_width = TYPE_PRECISION (type);
12300
12301 wide_int mask = wi::shifted_mask
12302 (inner_width, outer_width - inner_width, false,
12303 TYPE_PRECISION (TREE_TYPE (arg1)));
12304
12305 wide_int common = mask & wi::to_wide (arg1);
12306 if (common == mask)
12307 {
12308 tem_type = signed_type_for (TREE_TYPE (tem));
12309 tem = fold_convert_loc (loc, tem_type, tem);
12310 }
12311 else if (common == 0)
12312 {
12313 tem_type = unsigned_type_for (TREE_TYPE (tem));
12314 tem = fold_convert_loc (loc, tem_type, tem);
12315 }
12316 else
12317 tem = NULL;
12318 }
12319
12320 if (tem)
12321 return
12322 fold_convert_loc (loc, type,
12323 fold_build2_loc (loc, BIT_AND_EXPR,
12324 TREE_TYPE (tem), tem,
12325 fold_convert_loc (loc,
12326 TREE_TYPE (tem),
12327 arg1)));
12328 }
12329
12330 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12331 already handled above. */
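/* For example, with N == 3 the expression ((A >> 3) & 1) ? 8 : 0
   tests bit 3 of A and yields 8 == (1 << 3) when it is set, which is
   exactly what A & 8 computes directly.  (Illustrative for a constant
   N only.)  */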
12332 if (TREE_CODE (arg0) == BIT_AND_EXPR
12333 && integer_onep (TREE_OPERAND (arg0, 1))
12334 && integer_zerop (op2)
12335 && integer_pow2p (arg1))
12336 {
12337 tree tem = TREE_OPERAND (arg0, 0);
12338 STRIP_NOPS (tem);
12339 if (TREE_CODE (tem) == RSHIFT_EXPR
12340 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12341 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12342 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12343 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12344 fold_convert_loc (loc, type,
12345 TREE_OPERAND (tem, 0)),
12346 op1);
12347 }
12348
12349 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12350 is probably obsolete because the first operand should be a
12351 truth value (that's why we have the two cases above), but let's
12352 leave it in until we can confirm this for all front-ends. */
12353 if (integer_zerop (op2)
12354 && TREE_CODE (arg0) == NE_EXPR
12355 && integer_zerop (TREE_OPERAND (arg0, 1))
12356 && integer_pow2p (arg1)
12357 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12358 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12359 arg1, OEP_ONLY_CONST)
12360 /* operand_equal_p compares just value, not precision, so e.g.
12361 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
12362 second operand 32-bit -128, which is not a power of two (or vice
12363 versa). */
12364 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
12365 return pedantic_non_lvalue_loc (loc,
12366 fold_convert_loc (loc, type,
12367 TREE_OPERAND (arg0,
12368 0)));
12369
12370 /* Disable the transformations below for vectors, since
12371 fold_binary_op_with_conditional_arg may undo them immediately,
12372 yielding an infinite loop. */
12373 if (code == VEC_COND_EXPR)
12374 return NULL_TREE;
12375
12376 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12377 if (integer_zerop (op2)
12378 && truth_value_p (TREE_CODE (arg0))
12379 && truth_value_p (TREE_CODE (arg1))
12380 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12381 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12382 : TRUTH_ANDIF_EXPR,
12383 type, fold_convert_loc (loc, type, arg0), op1);
12384
12385 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12386 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
12387 && truth_value_p (TREE_CODE (arg0))
12388 && truth_value_p (TREE_CODE (arg1))
12389 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12390 {
12391 location_t loc0 = expr_location_or (arg0, loc);
12392 /* Only perform transformation if ARG0 is easily inverted. */
12393 tem = fold_invert_truthvalue (loc0, arg0);
12394 if (tem)
12395 return fold_build2_loc (loc, code == VEC_COND_EXPR
12396 ? BIT_IOR_EXPR
12397 : TRUTH_ORIF_EXPR,
12398 type, fold_convert_loc (loc, type, tem),
12399 op1);
12400 }
12401
12402 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12403 if (integer_zerop (arg1)
12404 && truth_value_p (TREE_CODE (arg0))
12405 && truth_value_p (TREE_CODE (op2))
12406 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12407 {
12408 location_t loc0 = expr_location_or (arg0, loc);
12409 /* Only perform transformation if ARG0 is easily inverted. */
12410 tem = fold_invert_truthvalue (loc0, arg0);
12411 if (tem)
12412 return fold_build2_loc (loc, code == VEC_COND_EXPR
12413 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12414 type, fold_convert_loc (loc, type, tem),
12415 op2);
12416 }
12417
12418 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12419 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
12420 && truth_value_p (TREE_CODE (arg0))
12421 && truth_value_p (TREE_CODE (op2))
12422 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12423 return fold_build2_loc (loc, code == VEC_COND_EXPR
12424 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12425 type, fold_convert_loc (loc, type, arg0), op2);
12426
12427 return NULL_TREE;
12428
12429 case CALL_EXPR:
12430 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12431 of fold_ternary on them. */
12432 gcc_unreachable ();
12433
12434 case BIT_FIELD_REF:
12435 if (TREE_CODE (arg0) == VECTOR_CST
12436 && (type == TREE_TYPE (TREE_TYPE (arg0))
12437 || (VECTOR_TYPE_P (type)
12438 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
12439 && tree_fits_uhwi_p (op1)
12440 && tree_fits_uhwi_p (op2))
12441 {
12442 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12443 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12444 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12445 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12446
12447 if (n != 0
12448 && (idx % width) == 0
12449 && (n % width) == 0
12450 && known_le ((idx + n) / width,
12451 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
12452 {
12453 idx = idx / width;
12454 n = n / width;
12455
12456 if (TREE_CODE (arg0) == VECTOR_CST)
12457 {
12458 if (n == 1)
12459 {
12460 tem = VECTOR_CST_ELT (arg0, idx);
12461 if (VECTOR_TYPE_P (type))
12462 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
12463 return tem;
12464 }
12465
12466 tree_vector_builder vals (type, n, 1);
12467 for (unsigned i = 0; i < n; ++i)
12468 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
12469 return vals.build ();
12470 }
12471 }
12472 }
12473
12474 /* On constants we can use native encode/interpret to constant
12475 fold (nearly) all BIT_FIELD_REFs. */
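/* For instance, a BIT_FIELD_REF extracting 16 bits at bit position 16
   from a 32-bit INTEGER_CST is handled by encoding the constant into a
   byte buffer with native_encode_expr and re-reading two of those bytes
   as the requested type with native_interpret_expr, so the target's
   byte order is respected automatically.  */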
12476 if (CONSTANT_CLASS_P (arg0)
12477 && can_native_interpret_type_p (type)
12478 && BITS_PER_UNIT == 8
12479 && tree_fits_uhwi_p (op1)
12480 && tree_fits_uhwi_p (op2))
12481 {
12482 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12483 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12484 /* Limit us to a reasonable amount of work. To relax the
12485 other limitations we need bit-shifting of the buffer
12486 and rounding up the size. */
12487 if (bitpos % BITS_PER_UNIT == 0
12488 && bitsize % BITS_PER_UNIT == 0
12489 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
12490 {
12491 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
12492 unsigned HOST_WIDE_INT len
12493 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
12494 bitpos / BITS_PER_UNIT);
12495 if (len > 0
12496 && len * BITS_PER_UNIT >= bitsize)
12497 {
12498 tree v = native_interpret_expr (type, b,
12499 bitsize / BITS_PER_UNIT);
12500 if (v)
12501 return v;
12502 }
12503 }
12504 }
12505
12506 return NULL_TREE;
12507
12508 case VEC_PERM_EXPR:
12509 /* Perform constant folding of VEC_PERM_EXPR. */
12510 if (TREE_CODE (arg2) == VECTOR_CST
12511 && TREE_CODE (op0) == VECTOR_CST
12512 && TREE_CODE (op1) == VECTOR_CST)
12513 {
12514 /* Build a vector of integers from the tree mask. */
12515 vec_perm_builder builder;
12516 if (!tree_to_vec_perm_builder (&builder, arg2))
12517 return NULL_TREE;
12518
12519 /* Create a vec_perm_indices for the integer vector. */
12520 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
12521 bool single_arg = (op0 == op1);
12522 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
12523 return fold_vec_perm (type, op0, op1, sel);
12524 }
12525 return NULL_TREE;
12526
12527 case BIT_INSERT_EXPR:
12528 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
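/* For the INTEGER_CST path below, with a 32-bit arg0 = 0x12345678,
   an 8-bit arg1 = 0xab and bitpos = 8, the computation is
     tem    = 0x12345678 & ~0x0000ff00 = 0x12340078
     tem2   = 0xab << 8                = 0x0000ab00
     result = tem | tem2               = 0x1234ab78
   (illustrative trace only, using zero-based bit positions).  */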
12529 if (TREE_CODE (arg0) == INTEGER_CST
12530 && TREE_CODE (arg1) == INTEGER_CST)
12531 {
12532 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12533 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
12534 wide_int tem = (wi::to_wide (arg0)
12535 & wi::shifted_mask (bitpos, bitsize, true,
12536 TYPE_PRECISION (type)));
12537 wide_int tem2
12538 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
12539 bitsize), bitpos);
12540 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
12541 }
12542 else if (TREE_CODE (arg0) == VECTOR_CST
12543 && CONSTANT_CLASS_P (arg1)
12544 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
12545 TREE_TYPE (arg1)))
12546 {
12547 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12548 unsigned HOST_WIDE_INT elsize
12549 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
12550 if (bitpos % elsize == 0)
12551 {
12552 unsigned k = bitpos / elsize;
12553 unsigned HOST_WIDE_INT nelts;
12554 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
12555 return arg0;
12556 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
12557 {
12558 tree_vector_builder elts (type, nelts, 1);
12559 elts.quick_grow (nelts);
12560 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
12561 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
12562 return elts.build ();
12563 }
12564 }
12565 }
12566 return NULL_TREE;
12567
12568 default:
12569 return NULL_TREE;
12570 } /* switch (code) */
12571 }
12572
12573 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12574 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
12575 constructor element index of the value returned. If the element is
12576 not found NULL_TREE is returned and *CTOR_IDX is updated to
12577 the index of the element after the ACCESS_INDEX position (which
12578 may be outside of the CTOR array). */
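/* For example, for the GNU C initializer { [3] = 7, 8, [10 ... 12] = 9 }
   the CONSTRUCTOR elements may carry the indexes 3, NULL (meaning the next
   index, 4) and the RANGE_EXPR 10..12; ACCESS_INDEX 4 then returns 8 and
   ACCESS_INDEX 11 returns 9.  */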
12579
12580 tree
12581 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
12582 unsigned *ctor_idx)
12583 {
12584 tree index_type = NULL_TREE;
12585 signop index_sgn = UNSIGNED;
12586 offset_int low_bound = 0;
12587
12588 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12589 {
12590 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12591 if (domain_type && TYPE_MIN_VALUE (domain_type))
12592 {
12593 /* Static constructors for variably sized objects make no sense. */
12594 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12595 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12596 /* ??? When it is obvious that the range is signed, treat it so. */
12597 if (TYPE_UNSIGNED (index_type)
12598 && TYPE_MAX_VALUE (domain_type)
12599 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
12600 TYPE_MIN_VALUE (domain_type)))
12601 {
12602 index_sgn = SIGNED;
12603 low_bound
12604 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
12605 SIGNED);
12606 }
12607 else
12608 {
12609 index_sgn = TYPE_SIGN (index_type);
12610 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12611 }
12612 }
12613 }
12614
12615 if (index_type)
12616 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12617 index_sgn);
12618
12619 offset_int index = low_bound;
12620 if (index_type)
12621 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12622
12623 offset_int max_index = index;
12624 unsigned cnt;
12625 tree cfield, cval;
12626 bool first_p = true;
12627
12628 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12629 {
12630 /* Array constructor might explicitly set index, or specify a range,
12631 or leave index NULL, meaning that it is the next index after the
12632 previous one. */
12633 if (cfield)
12634 {
12635 if (TREE_CODE (cfield) == INTEGER_CST)
12636 max_index = index
12637 = offset_int::from (wi::to_wide (cfield), index_sgn);
12638 else
12639 {
12640 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12641 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
12642 index_sgn);
12643 max_index
12644 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
12645 index_sgn);
12646 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
12647 }
12648 }
12649 else if (!first_p)
12650 {
12651 index = max_index + 1;
12652 if (index_type)
12653 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12654 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
12655 max_index = index;
12656 }
12657 else
12658 first_p = false;
12659
12660 /* Do we have a match? */
12661 if (wi::cmp (access_index, index, index_sgn) >= 0)
12662 {
12663 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
12664 {
12665 if (ctor_idx)
12666 *ctor_idx = cnt;
12667 return cval;
12668 }
12669 }
12670 else if (in_gimple_form)
12671 /* We're past the element we search for. Note during parsing
12672 the elements might not be sorted.
12673 ??? We should use a binary search and a flag on the
12674 CONSTRUCTOR as to whether elements are sorted in declaration
12675 order. */
12676 break;
12677 }
12678 if (ctor_idx)
12679 *ctor_idx = cnt;
12680 return NULL_TREE;
12681 }
12682
12683 /* Perform constant folding and related simplification of EXPR.
12684 The related simplifications include x*1 => x, x*0 => 0, etc.,
12685 and application of the associative law.
12686 NOP_EXPR conversions may be removed freely (as long as we
12687 are careful not to change the type of the overall expression).
12688 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12689 but we can constant-fold them if they have constant operands. */
12690
12691 #ifdef ENABLE_FOLD_CHECKING
12692 # define fold(x) fold_1 (x)
12693 static tree fold_1 (tree);
12694 static
12695 #endif
12696 tree
12697 fold (tree expr)
12698 {
12699 const tree t = expr;
12700 enum tree_code code = TREE_CODE (t);
12701 enum tree_code_class kind = TREE_CODE_CLASS (code);
12702 tree tem;
12703 location_t loc = EXPR_LOCATION (expr);
12704
12705 /* Return right away if a constant. */
12706 if (kind == tcc_constant)
12707 return t;
12708
12709 /* CALL_EXPR-like objects with variable numbers of operands are
12710 treated specially. */
12711 if (kind == tcc_vl_exp)
12712 {
12713 if (code == CALL_EXPR)
12714 {
12715 tem = fold_call_expr (loc, expr, false);
12716 return tem ? tem : expr;
12717 }
12718 return expr;
12719 }
12720
12721 if (IS_EXPR_CODE_CLASS (kind))
12722 {
12723 tree type = TREE_TYPE (t);
12724 tree op0, op1, op2;
12725
12726 switch (TREE_CODE_LENGTH (code))
12727 {
12728 case 1:
12729 op0 = TREE_OPERAND (t, 0);
12730 tem = fold_unary_loc (loc, code, type, op0);
12731 return tem ? tem : expr;
12732 case 2:
12733 op0 = TREE_OPERAND (t, 0);
12734 op1 = TREE_OPERAND (t, 1);
12735 tem = fold_binary_loc (loc, code, type, op0, op1);
12736 return tem ? tem : expr;
12737 case 3:
12738 op0 = TREE_OPERAND (t, 0);
12739 op1 = TREE_OPERAND (t, 1);
12740 op2 = TREE_OPERAND (t, 2);
12741 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12742 return tem ? tem : expr;
12743 default:
12744 break;
12745 }
12746 }
12747
12748 switch (code)
12749 {
12750 case ARRAY_REF:
12751 {
12752 tree op0 = TREE_OPERAND (t, 0);
12753 tree op1 = TREE_OPERAND (t, 1);
12754
12755 if (TREE_CODE (op1) == INTEGER_CST
12756 && TREE_CODE (op0) == CONSTRUCTOR
12757 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12758 {
12759 tree val = get_array_ctor_element_at_index (op0,
12760 wi::to_offset (op1));
12761 if (val)
12762 return val;
12763 }
12764
12765 return t;
12766 }
12767
12768 /* Return a VECTOR_CST if possible. */
12769 case CONSTRUCTOR:
12770 {
12771 tree type = TREE_TYPE (t);
12772 if (TREE_CODE (type) != VECTOR_TYPE)
12773 return t;
12774
12775 unsigned i;
12776 tree val;
12777 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12778 if (! CONSTANT_CLASS_P (val))
12779 return t;
12780
12781 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12782 }
12783
12784 case CONST_DECL:
12785 return fold (DECL_INITIAL (t));
12786
12787 default:
12788 return t;
12789 } /* switch (code) */
12790 }
12791
12792 #ifdef ENABLE_FOLD_CHECKING
12793 #undef fold
12794
12795 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12796 hash_table<nofree_ptr_hash<const tree_node> > *);
12797 static void fold_check_failed (const_tree, const_tree);
12798 void print_fold_checksum (const_tree);
12799
12800 /* When --enable-checking=fold, compute a digest of expr before
12801 and after the actual fold call to verify that fold did not accidentally
12802 change the original expr. */
12803
12804 tree
12805 fold (tree expr)
12806 {
12807 tree ret;
12808 struct md5_ctx ctx;
12809 unsigned char checksum_before[16], checksum_after[16];
12810 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12811
12812 md5_init_ctx (&ctx);
12813 fold_checksum_tree (expr, &ctx, &ht);
12814 md5_finish_ctx (&ctx, checksum_before);
12815 ht.empty ();
12816
12817 ret = fold_1 (expr);
12818
12819 md5_init_ctx (&ctx);
12820 fold_checksum_tree (expr, &ctx, &ht);
12821 md5_finish_ctx (&ctx, checksum_after);
12822
12823 if (memcmp (checksum_before, checksum_after, 16))
12824 fold_check_failed (expr, ret);
12825
12826 return ret;
12827 }
12828
12829 void
12830 print_fold_checksum (const_tree expr)
12831 {
12832 struct md5_ctx ctx;
12833 unsigned char checksum[16], cnt;
12834 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12835
12836 md5_init_ctx (&ctx);
12837 fold_checksum_tree (expr, &ctx, &ht);
12838 md5_finish_ctx (&ctx, checksum);
12839 for (cnt = 0; cnt < 16; ++cnt)
12840 fprintf (stderr, "%02x", checksum[cnt]);
12841 putc ('\n', stderr);
12842 }
12843
12844 static void
12845 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12846 {
12847 internal_error ("fold check: original tree changed by fold");
12848 }
12849
12850 static void
12851 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12852 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12853 {
12854 const tree_node **slot;
12855 enum tree_code code;
12856 union tree_node *buf;
12857 int i, len;
12858
12859 recursive_label:
12860 if (expr == NULL)
12861 return;
12862 slot = ht->find_slot (expr, INSERT);
12863 if (*slot != NULL)
12864 return;
12865 *slot = expr;
12866 code = TREE_CODE (expr);
12867 if (TREE_CODE_CLASS (code) == tcc_declaration
12868 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12869 {
12870 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12871 size_t sz = tree_size (expr);
12872 buf = XALLOCAVAR (union tree_node, sz);
12873 memcpy ((char *) buf, expr, sz);
12874 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
12875 buf->decl_with_vis.symtab_node = NULL;
12876 buf->base.nowarning_flag = 0;
12877 expr = (tree) buf;
12878 }
12879 else if (TREE_CODE_CLASS (code) == tcc_type
12880 && (TYPE_POINTER_TO (expr)
12881 || TYPE_REFERENCE_TO (expr)
12882 || TYPE_CACHED_VALUES_P (expr)
12883 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12884 || TYPE_NEXT_VARIANT (expr)
12885 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12886 {
12887 /* Allow these fields to be modified. */
12888 tree tmp;
12889 size_t sz = tree_size (expr);
12890 buf = XALLOCAVAR (union tree_node, sz);
12891 memcpy ((char *) buf, expr, sz);
12892 expr = tmp = (tree) buf;
12893 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12894 TYPE_POINTER_TO (tmp) = NULL;
12895 TYPE_REFERENCE_TO (tmp) = NULL;
12896 TYPE_NEXT_VARIANT (tmp) = NULL;
12897 TYPE_ALIAS_SET (tmp) = -1;
12898 if (TYPE_CACHED_VALUES_P (tmp))
12899 {
12900 TYPE_CACHED_VALUES_P (tmp) = 0;
12901 TYPE_CACHED_VALUES (tmp) = NULL;
12902 }
12903 }
12904 else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
12905 {
12906 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
12907 and change builtins.c etc. instead - see PR89543. */
12908 size_t sz = tree_size (expr);
12909 buf = XALLOCAVAR (union tree_node, sz);
12910 memcpy ((char *) buf, expr, sz);
12911 buf->base.nowarning_flag = 0;
12912 expr = (tree) buf;
12913 }
12914 md5_process_bytes (expr, tree_size (expr), ctx);
12915 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12916 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12917 if (TREE_CODE_CLASS (code) != tcc_type
12918 && TREE_CODE_CLASS (code) != tcc_declaration
12919 && code != TREE_LIST
12920 && code != SSA_NAME
12921 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12922 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12923 switch (TREE_CODE_CLASS (code))
12924 {
12925 case tcc_constant:
12926 switch (code)
12927 {
12928 case STRING_CST:
12929 md5_process_bytes (TREE_STRING_POINTER (expr),
12930 TREE_STRING_LENGTH (expr), ctx);
12931 break;
12932 case COMPLEX_CST:
12933 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12934 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12935 break;
12936 case VECTOR_CST:
12937 len = vector_cst_encoded_nelts (expr);
12938 for (i = 0; i < len; ++i)
12939 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12940 break;
12941 default:
12942 break;
12943 }
12944 break;
12945 case tcc_exceptional:
12946 switch (code)
12947 {
12948 case TREE_LIST:
12949 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12950 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12951 expr = TREE_CHAIN (expr);
12952 goto recursive_label;
12953 break;
12954 case TREE_VEC:
12955 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12956 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12957 break;
12958 default:
12959 break;
12960 }
12961 break;
12962 case tcc_expression:
12963 case tcc_reference:
12964 case tcc_comparison:
12965 case tcc_unary:
12966 case tcc_binary:
12967 case tcc_statement:
12968 case tcc_vl_exp:
12969 len = TREE_OPERAND_LENGTH (expr);
12970 for (i = 0; i < len; ++i)
12971 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12972 break;
12973 case tcc_declaration:
12974 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12975 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12976 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12977 {
12978 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12979 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12980 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12981 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12982 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12983 }
12984
12985 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12986 {
12987 if (TREE_CODE (expr) == FUNCTION_DECL)
12988 {
12989 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12990 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12991 }
12992 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12993 }
12994 break;
12995 case tcc_type:
12996 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12997 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12998 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12999 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13000 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13001 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13002 if (INTEGRAL_TYPE_P (expr)
13003 || SCALAR_FLOAT_TYPE_P (expr))
13004 {
13005 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13006 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13007 }
13008 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13009 if (TREE_CODE (expr) == RECORD_TYPE
13010 || TREE_CODE (expr) == UNION_TYPE
13011 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13012 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13013 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13014 break;
13015 default:
13016 break;
13017 }
13018 }
13019
13020 /* Helper function for outputting the checksum of a tree T. When
13021 debugging with gdb, you can "define mynext" to be "next" followed
13022 by "call debug_fold_checksum (op0)", then just trace down till the
13023 outputs differ. */
13024
13025 DEBUG_FUNCTION void
13026 debug_fold_checksum (const_tree t)
13027 {
13028 int i;
13029 unsigned char checksum[16];
13030 struct md5_ctx ctx;
13031 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13032
13033 md5_init_ctx (&ctx);
13034 fold_checksum_tree (t, &ctx, &ht);
13035 md5_finish_ctx (&ctx, checksum);
13036 ht.empty ();
13037
13038 for (i = 0; i < 16; i++)
13039 fprintf (stderr, "%d ", checksum[i]);
13040
13041 fprintf (stderr, "\n");
13042 }
13043
13044 #endif
13045
13046 /* Fold a unary tree expression with code CODE of type TYPE with an
13047 operand OP0. LOC is the location of the resulting expression.
13048 Return a folded expression if successful. Otherwise, return a tree
13049 expression with code CODE of type TYPE with an operand OP0. */
13050
13051 tree
13052 fold_build1_loc (location_t loc,
13053 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13054 {
13055 tree tem;
13056 #ifdef ENABLE_FOLD_CHECKING
13057 unsigned char checksum_before[16], checksum_after[16];
13058 struct md5_ctx ctx;
13059 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13060
13061 md5_init_ctx (&ctx);
13062 fold_checksum_tree (op0, &ctx, &ht);
13063 md5_finish_ctx (&ctx, checksum_before);
13064 ht.empty ();
13065 #endif
13066
13067 tem = fold_unary_loc (loc, code, type, op0);
13068 if (!tem)
13069 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13070
13071 #ifdef ENABLE_FOLD_CHECKING
13072 md5_init_ctx (&ctx);
13073 fold_checksum_tree (op0, &ctx, &ht);
13074 md5_finish_ctx (&ctx, checksum_after);
13075
13076 if (memcmp (checksum_before, checksum_after, 16))
13077 fold_check_failed (op0, tem);
13078 #endif
13079 return tem;
13080 }
13081
13082 /* Fold a binary tree expression with code CODE of type TYPE with
13083 operands OP0 and OP1. LOC is the location of the resulting
13084 expression. Return a folded expression if successful. Otherwise,
13085 return a tree expression with code CODE of type TYPE with operands
13086 OP0 and OP1. */
13087
13088 tree
13089 fold_build2_loc (location_t loc,
13090 enum tree_code code, tree type, tree op0, tree op1
13091 MEM_STAT_DECL)
13092 {
13093 tree tem;
13094 #ifdef ENABLE_FOLD_CHECKING
13095 unsigned char checksum_before_op0[16],
13096 checksum_before_op1[16],
13097 checksum_after_op0[16],
13098 checksum_after_op1[16];
13099 struct md5_ctx ctx;
13100 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13101
13102 md5_init_ctx (&ctx);
13103 fold_checksum_tree (op0, &ctx, &ht);
13104 md5_finish_ctx (&ctx, checksum_before_op0);
13105 ht.empty ();
13106
13107 md5_init_ctx (&ctx);
13108 fold_checksum_tree (op1, &ctx, &ht);
13109 md5_finish_ctx (&ctx, checksum_before_op1);
13110 ht.empty ();
13111 #endif
13112
13113 tem = fold_binary_loc (loc, code, type, op0, op1);
13114 if (!tem)
13115 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13116
13117 #ifdef ENABLE_FOLD_CHECKING
13118 md5_init_ctx (&ctx);
13119 fold_checksum_tree (op0, &ctx, &ht);
13120 md5_finish_ctx (&ctx, checksum_after_op0);
13121 ht.empty ();
13122
13123 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13124 fold_check_failed (op0, tem);
13125
13126 md5_init_ctx (&ctx);
13127 fold_checksum_tree (op1, &ctx, &ht);
13128 md5_finish_ctx (&ctx, checksum_after_op1);
13129
13130 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13131 fold_check_failed (op1, tem);
13132 #endif
13133 return tem;
13134 }
13135
13136 /* Fold a ternary tree expression with code CODE of type TYPE with
13137 operands OP0, OP1, and OP2. Return a folded expression if
13138 successful. Otherwise, return a tree expression with code CODE of
13139 type TYPE with operands OP0, OP1, and OP2. */
13140
13141 tree
13142 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13143 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13144 {
13145 tree tem;
13146 #ifdef ENABLE_FOLD_CHECKING
13147 unsigned char checksum_before_op0[16],
13148 checksum_before_op1[16],
13149 checksum_before_op2[16],
13150 checksum_after_op0[16],
13151 checksum_after_op1[16],
13152 checksum_after_op2[16];
13153 struct md5_ctx ctx;
13154 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13155
13156 md5_init_ctx (&ctx);
13157 fold_checksum_tree (op0, &ctx, &ht);
13158 md5_finish_ctx (&ctx, checksum_before_op0);
13159 ht.empty ();
13160
13161 md5_init_ctx (&ctx);
13162 fold_checksum_tree (op1, &ctx, &ht);
13163 md5_finish_ctx (&ctx, checksum_before_op1);
13164 ht.empty ();
13165
13166 md5_init_ctx (&ctx);
13167 fold_checksum_tree (op2, &ctx, &ht);
13168 md5_finish_ctx (&ctx, checksum_before_op2);
13169 ht.empty ();
13170 #endif
13171
13172 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13173 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13174 if (!tem)
13175 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13176
13177 #ifdef ENABLE_FOLD_CHECKING
13178 md5_init_ctx (&ctx);
13179 fold_checksum_tree (op0, &ctx, &ht);
13180 md5_finish_ctx (&ctx, checksum_after_op0);
13181 ht.empty ();
13182
13183 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13184 fold_check_failed (op0, tem);
13185
13186 md5_init_ctx (&ctx);
13187 fold_checksum_tree (op1, &ctx, &ht);
13188 md5_finish_ctx (&ctx, checksum_after_op1);
13189 ht.empty ();
13190
13191 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13192 fold_check_failed (op1, tem);
13193
13194 md5_init_ctx (&ctx);
13195 fold_checksum_tree (op2, &ctx, &ht);
13196 md5_finish_ctx (&ctx, checksum_after_op2);
13197
13198 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13199 fold_check_failed (op2, tem);
13200 #endif
13201 return tem;
13202 }
13203
13204 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13205 arguments in ARGARRAY, and a null static chain.
13206 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13207 of type TYPE from the given operands as constructed by build_call_array. */
13208
13209 tree
13210 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13211 int nargs, tree *argarray)
13212 {
13213 tree tem;
13214 #ifdef ENABLE_FOLD_CHECKING
13215 unsigned char checksum_before_fn[16],
13216 checksum_before_arglist[16],
13217 checksum_after_fn[16],
13218 checksum_after_arglist[16];
13219 struct md5_ctx ctx;
13220 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13221 int i;
13222
13223 md5_init_ctx (&ctx);
13224 fold_checksum_tree (fn, &ctx, &ht);
13225 md5_finish_ctx (&ctx, checksum_before_fn);
13226 ht.empty ();
13227
13228 md5_init_ctx (&ctx);
13229 for (i = 0; i < nargs; i++)
13230 fold_checksum_tree (argarray[i], &ctx, &ht);
13231 md5_finish_ctx (&ctx, checksum_before_arglist);
13232 ht.empty ();
13233 #endif
13234
13235 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13236 if (!tem)
13237 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13238
13239 #ifdef ENABLE_FOLD_CHECKING
13240 md5_init_ctx (&ctx);
13241 fold_checksum_tree (fn, &ctx, &ht);
13242 md5_finish_ctx (&ctx, checksum_after_fn);
13243 ht.empty ();
13244
13245 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13246 fold_check_failed (fn, tem);
13247
13248 md5_init_ctx (&ctx);
13249 for (i = 0; i < nargs; i++)
13250 fold_checksum_tree (argarray[i], &ctx, &ht);
13251 md5_finish_ctx (&ctx, checksum_after_arglist);
13252
13253 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13254 fold_check_failed (NULL_TREE, tem);
13255 #endif
13256 return tem;
13257 }
13258
13259 /* Perform constant folding and related simplification of initializer
13260 expression EXPR. These behave identically to "fold_buildN" but ignore
13261 potential run-time traps and exceptions that fold must preserve. */
13262
13263 #define START_FOLD_INIT \
13264 int saved_signaling_nans = flag_signaling_nans;\
13265 int saved_trapping_math = flag_trapping_math;\
13266 int saved_rounding_math = flag_rounding_math;\
13267 int saved_trapv = flag_trapv;\
13268 int saved_folding_initializer = folding_initializer;\
13269 flag_signaling_nans = 0;\
13270 flag_trapping_math = 0;\
13271 flag_rounding_math = 0;\
13272 flag_trapv = 0;\
13273 folding_initializer = 1;
13274
13275 #define END_FOLD_INIT \
13276 flag_signaling_nans = saved_signaling_nans;\
13277 flag_trapping_math = saved_trapping_math;\
13278 flag_rounding_math = saved_rounding_math;\
13279 flag_trapv = saved_trapv;\
13280 folding_initializer = saved_folding_initializer;
13281
13282 tree
13283 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13284 tree type, tree op)
13285 {
13286 tree result;
13287 START_FOLD_INIT;
13288
13289 result = fold_build1_loc (loc, code, type, op);
13290
13291 END_FOLD_INIT;
13292 return result;
13293 }
13294
13295 tree
13296 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13297 tree type, tree op0, tree op1)
13298 {
13299 tree result;
13300 START_FOLD_INIT;
13301
13302 result = fold_build2_loc (loc, code, type, op0, op1);
13303
13304 END_FOLD_INIT;
13305 return result;
13306 }
13307
13308 tree
13309 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13310 int nargs, tree *argarray)
13311 {
13312 tree result;
13313 START_FOLD_INIT;
13314
13315 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13316
13317 END_FOLD_INIT;
13318 return result;
13319 }
13320
13321 #undef START_FOLD_INIT
13322 #undef END_FOLD_INIT
13323
13324 /* Determine if first argument is a multiple of second argument. Return 0 if
13325 it is not, or we cannot easily determine it to be.
13326
13327 An example of the sort of thing we care about (at this point; this routine
13328 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13329 fold cases do now) is discovering that
13330
13331 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13332
13333 is a multiple of
13334
13335 SAVE_EXPR (J * 8)
13336
13337 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13338
13339 This code also handles discovering that
13340
13341 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13342
13343 is a multiple of 8 so we don't have to worry about dealing with a
13344 possible remainder.
13345
13346 Note that we *look* inside a SAVE_EXPR only to determine how it was
13347 calculated; it is not safe for fold to do much of anything else with the
13348 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13349 at run time. For example, the latter example above *cannot* be implemented
13350 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13351 evaluation time of the original SAVE_EXPR is not necessarily the same at
13352 the time the new expression is evaluated. The only optimization of this
13353 sort that would be valid is changing
13354
13355 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13356
13357 divided by 8 to
13358
13359 SAVE_EXPR (I) * SAVE_EXPR (J)
13360
13361 (where the same SAVE_EXPR (J) is used in the original and the
13362 transformed version). */
13363
13364 int
13365 multiple_of_p (tree type, const_tree top, const_tree bottom)
13366 {
13367 gimple *stmt;
13368 tree t1, op1, op2;
13369
13370 if (operand_equal_p (top, bottom, 0))
13371 return 1;
13372
13373 if (TREE_CODE (type) != INTEGER_TYPE)
13374 return 0;
13375
13376 switch (TREE_CODE (top))
13377 {
13378 case BIT_AND_EXPR:
13379 /* Bitwise and provides a power of two multiple. If the mask is
13380 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13381 if (!integer_pow2p (bottom))
13382 return 0;
13383 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13384 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13385
13386 case MULT_EXPR:
13387 if (TREE_CODE (bottom) == INTEGER_CST)
13388 {
13389 op1 = TREE_OPERAND (top, 0);
13390 op2 = TREE_OPERAND (top, 1);
13391 if (TREE_CODE (op1) == INTEGER_CST)
13392 std::swap (op1, op2);
13393 if (TREE_CODE (op2) == INTEGER_CST)
13394 {
13395 if (multiple_of_p (type, op2, bottom))
13396 return 1;
13397 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
13398 if (multiple_of_p (type, bottom, op2))
13399 {
13400 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
13401 wi::to_widest (op2));
13402 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
13403 {
13404 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
13405 return multiple_of_p (type, op1, op2);
13406 }
13407 }
13408 return multiple_of_p (type, op1, bottom);
13409 }
13410 }
13411 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13412 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13413
13414 case MINUS_EXPR:
13415 /* It is impossible to prove precisely whether op0 - op1 is a multiple
13416 of bottom, so be conservative here and check whether both op0 and op1
13417 are multiples of bottom. Note we check the second operand first
13418 since it's usually simpler. */
13419 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13420 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13421
13422 case PLUS_EXPR:
13423 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13424 as op0 - 3 if the expression has unsigned type. For example,
13425 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
13426 op1 = TREE_OPERAND (top, 1);
13427 if (TYPE_UNSIGNED (type)
13428 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
13429 op1 = fold_build1 (NEGATE_EXPR, type, op1);
13430 return (multiple_of_p (type, op1, bottom)
13431 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13432
13433 case LSHIFT_EXPR:
13434 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13435 {
13436 op1 = TREE_OPERAND (top, 1);
13437 /* const_binop may not detect overflow correctly,
13438 so check for it explicitly here. */
13439 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
13440 wi::to_wide (op1))
13441 && (t1 = fold_convert (type,
13442 const_binop (LSHIFT_EXPR, size_one_node,
13443 op1))) != 0
13444 && !TREE_OVERFLOW (t1))
13445 return multiple_of_p (type, t1, bottom);
13446 }
13447 return 0;
13448
13449 case NOP_EXPR:
13450 /* Can't handle conversions from non-integral or wider integral type. */
13451 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13452 || (TYPE_PRECISION (type)
13453 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13454 return 0;
13455
13456 /* fall through */
13457
13458 case SAVE_EXPR:
13459 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13460
13461 case COND_EXPR:
13462 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13463 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13464
13465 case INTEGER_CST:
13466 if (TREE_CODE (bottom) != INTEGER_CST
13467 || integer_zerop (bottom)
13468 || (TYPE_UNSIGNED (type)
13469 && (tree_int_cst_sgn (top) < 0
13470 || tree_int_cst_sgn (bottom) < 0)))
13471 return 0;
13472 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13473 SIGNED);
13474
13475 case SSA_NAME:
13476 if (TREE_CODE (bottom) == INTEGER_CST
13477 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
13478 && gimple_code (stmt) == GIMPLE_ASSIGN)
13479 {
13480 enum tree_code code = gimple_assign_rhs_code (stmt);
13481
13482 /* Check for special cases to see if top is defined as multiple
13483 of bottom:
13484
13485 top = (X & ~(bottom - 1)) ; bottom is a power of 2
13486
13487 or
13488
13489 Y = X % bottom
13490 top = X - Y. */
13491 if (code == BIT_AND_EXPR
13492 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13493 && TREE_CODE (op2) == INTEGER_CST
13494 && integer_pow2p (bottom)
13495 && wi::multiple_of_p (wi::to_widest (op2),
13496 wi::to_widest (bottom), UNSIGNED))
13497 return 1;
13498
13499 op1 = gimple_assign_rhs1 (stmt);
13500 if (code == MINUS_EXPR
13501 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13502 && TREE_CODE (op2) == SSA_NAME
13503 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
13504 && gimple_code (stmt) == GIMPLE_ASSIGN
13505 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
13506 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
13507 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
13508 return 1;
13509 }
13510
13511 /* fall through */
13512
13513 default:
13514 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
13515 return multiple_p (wi::to_poly_widest (top),
13516 wi::to_poly_widest (bottom));
13517
13518 return 0;
13519 }
13520 }
13521
13522 #define tree_expr_nonnegative_warnv_p(X, Y) \
13523 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13524
13525 #define RECURSE(X) \
13526 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
13527
13528 /* Return true if CODE or TYPE is known to be non-negative. */
13529
13530 static bool
13531 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13532 {
13533 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13534 && truth_value_p (code))
13535 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13536 have a signed:1 type (where the value is -1 and 0). */
13537 return true;
13538 return false;
13539 }
13540
13541 /* Return true if (CODE OP0) is known to be non-negative. If the return
13542 value is based on the assumption that signed overflow is undefined,
13543 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13544 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13545
13546 bool
13547 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13548 bool *strict_overflow_p, int depth)
13549 {
13550 if (TYPE_UNSIGNED (type))
13551 return true;
13552
13553 switch (code)
13554 {
13555 case ABS_EXPR:
13556 /* We can't return 1 if flag_wrapv is set because
13557 ABS_EXPR<INT_MIN> = INT_MIN. */
13558 if (!ANY_INTEGRAL_TYPE_P (type))
13559 return true;
13560 if (TYPE_OVERFLOW_UNDEFINED (type))
13561 {
13562 *strict_overflow_p = true;
13563 return true;
13564 }
13565 break;
13566
13567 case NON_LVALUE_EXPR:
13568 case FLOAT_EXPR:
13569 case FIX_TRUNC_EXPR:
13570 return RECURSE (op0);
13571
13572 CASE_CONVERT:
13573 {
13574 tree inner_type = TREE_TYPE (op0);
13575 tree outer_type = type;
13576
13577 if (TREE_CODE (outer_type) == REAL_TYPE)
13578 {
13579 if (TREE_CODE (inner_type) == REAL_TYPE)
13580 return RECURSE (op0);
13581 if (INTEGRAL_TYPE_P (inner_type))
13582 {
13583 if (TYPE_UNSIGNED (inner_type))
13584 return true;
13585 return RECURSE (op0);
13586 }
13587 }
13588 else if (INTEGRAL_TYPE_P (outer_type))
13589 {
13590 if (TREE_CODE (inner_type) == REAL_TYPE)
13591 return RECURSE (op0);
13592 if (INTEGRAL_TYPE_P (inner_type))
13593 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13594 && TYPE_UNSIGNED (inner_type);
13595 }
13596 }
13597 break;
13598
13599 default:
13600 return tree_simple_nonnegative_warnv_p (code, type);
13601 }
13602
13603 /* We don't know sign of `t', so be conservative and return false. */
13604 return false;
13605 }
13606
13607 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13608 value is based on the assumption that signed overflow is undefined,
13609 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13610 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13611
13612 bool
13613 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13614 tree op1, bool *strict_overflow_p,
13615 int depth)
13616 {
13617 if (TYPE_UNSIGNED (type))
13618 return true;
13619
13620 switch (code)
13621 {
13622 case POINTER_PLUS_EXPR:
13623 case PLUS_EXPR:
13624 if (FLOAT_TYPE_P (type))
13625 return RECURSE (op0) && RECURSE (op1);
13626
13627 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13628 both unsigned and at least 2 bits shorter than the result. */
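/* E.g. two zero-extended 16-bit unsigned values added in a 32-bit
   signed type: the sum is at most 0xffff + 0xffff = 0x1fffe, which
   needs only 17 value bits, so the 32-bit sign bit can never be set;
   hence the test below that MAX of the inner precisions plus one is
   still smaller than TYPE_PRECISION (type).  */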
13629 if (TREE_CODE (type) == INTEGER_TYPE
13630 && TREE_CODE (op0) == NOP_EXPR
13631 && TREE_CODE (op1) == NOP_EXPR)
13632 {
13633 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13634 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13635 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13636 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13637 {
13638 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13639 TYPE_PRECISION (inner2)) + 1;
13640 return prec < TYPE_PRECISION (type);
13641 }
13642 }
13643 break;
13644
13645 case MULT_EXPR:
13646 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13647 {
13648 /* x * x is always non-negative for floating point x
13649 or without overflow. */
13650 if (operand_equal_p (op0, op1, 0)
13651 || (RECURSE (op0) && RECURSE (op1)))
13652 {
13653 if (ANY_INTEGRAL_TYPE_P (type)
13654 && TYPE_OVERFLOW_UNDEFINED (type))
13655 *strict_overflow_p = true;
13656 return true;
13657 }
13658 }
13659
13660 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13661 	 both unsigned and their combined precision is smaller than that of the result. */
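	 /* Illustrative sketch: for (int) (unsigned char) c * 100, precision0
	 is 8 (unsigned char) and precision1 is 7 (the minimum unsigned
	 precision of the constant 100), so 8 + 7 = 15 < 32 and the product
	 cannot reach the sign bit. */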
13662 if (TREE_CODE (type) == INTEGER_TYPE
13663 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13664 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13665 {
13666 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13667 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13668 : TREE_TYPE (op0);
13669 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13670 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13671 : TREE_TYPE (op1);
13672
13673 bool unsigned0 = TYPE_UNSIGNED (inner0);
13674 bool unsigned1 = TYPE_UNSIGNED (inner1);
13675
13676 if (TREE_CODE (op0) == INTEGER_CST)
13677 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13678
13679 if (TREE_CODE (op1) == INTEGER_CST)
13680 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13681
13682 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13683 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13684 {
13685 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13686 ? tree_int_cst_min_precision (op0, UNSIGNED)
13687 : TYPE_PRECISION (inner0);
13688
13689 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13690 ? tree_int_cst_min_precision (op1, UNSIGNED)
13691 : TYPE_PRECISION (inner1);
13692
13693 return precision0 + precision1 < TYPE_PRECISION (type);
13694 }
13695 }
13696 return false;
13697
13698 case BIT_AND_EXPR:
13699 case MAX_EXPR:
13700 return RECURSE (op0) || RECURSE (op1);
13701
13702 case BIT_IOR_EXPR:
13703 case BIT_XOR_EXPR:
13704 case MIN_EXPR:
13705 case RDIV_EXPR:
13706 case TRUNC_DIV_EXPR:
13707 case CEIL_DIV_EXPR:
13708 case FLOOR_DIV_EXPR:
13709 case ROUND_DIV_EXPR:
13710 return RECURSE (op0) && RECURSE (op1);
13711
13712 case TRUNC_MOD_EXPR:
13713 return RECURSE (op0);
13714
13715 case FLOOR_MOD_EXPR:
13716 return RECURSE (op1);
13717
13718 case CEIL_MOD_EXPR:
13719 case ROUND_MOD_EXPR:
13720 default:
13721 return tree_simple_nonnegative_warnv_p (code, type);
13722 }
13723
13724 	 /* We don't know the sign of the result, so be conservative and return false. */
13725 return false;
13726 }
13727
13728 /* Return true if T is known to be non-negative. If the return
13729 value is based on the assumption that signed overflow is undefined,
13730 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13731 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13732
13733 bool
13734 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13735 {
13736 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13737 return true;
13738
13739 switch (TREE_CODE (t))
13740 {
13741 case INTEGER_CST:
13742 return tree_int_cst_sgn (t) >= 0;
13743
13744 case REAL_CST:
13745 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13746
13747 case FIXED_CST:
13748 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13749
13750 case COND_EXPR:
13751 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13752
13753 case SSA_NAME:
13754 /* Limit the depth of recursion to avoid quadratic behavior.
13755 This is expected to catch almost all occurrences in practice.
13756 If this code misses important cases that unbounded recursion
13757 would not, passes that need this information could be revised
13758 to provide it through dataflow propagation. */
13759 return (!name_registered_for_update_p (t)
13760 && depth < param_max_ssa_name_query_depth
13761 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13762 strict_overflow_p, depth));
13763
13764 default:
13765 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13766 }
13767 }
13768
13769 /* Return true if T is known to be non-negative. If the return
13770 value is based on the assumption that signed overflow is undefined,
13771 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13772 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13773
13774 bool
13775 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13776 bool *strict_overflow_p, int depth)
13777 {
13778 switch (fn)
13779 {
13780 CASE_CFN_ACOS:
13781 CASE_CFN_ACOSH:
13782 CASE_CFN_CABS:
13783 CASE_CFN_COSH:
13784 CASE_CFN_ERFC:
13785 CASE_CFN_EXP:
13786 CASE_CFN_EXP10:
13787 CASE_CFN_EXP2:
13788 CASE_CFN_FABS:
13789 CASE_CFN_FDIM:
13790 CASE_CFN_HYPOT:
13791 CASE_CFN_POW10:
13792 CASE_CFN_FFS:
13793 CASE_CFN_PARITY:
13794 CASE_CFN_POPCOUNT:
13795 CASE_CFN_CLZ:
13796 CASE_CFN_CLRSB:
13797 case CFN_BUILT_IN_BSWAP16:
13798 case CFN_BUILT_IN_BSWAP32:
13799 case CFN_BUILT_IN_BSWAP64:
13800 case CFN_BUILT_IN_BSWAP128:
13801 /* Always true. */
13802 return true;
13803
13804 CASE_CFN_SQRT:
13805 CASE_CFN_SQRT_FN:
13806 /* sqrt(-0.0) is -0.0. */
13807 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13808 return true;
13809 return RECURSE (arg0);
13810
13811 CASE_CFN_ASINH:
13812 CASE_CFN_ATAN:
13813 CASE_CFN_ATANH:
13814 CASE_CFN_CBRT:
13815 CASE_CFN_CEIL:
13816 CASE_CFN_CEIL_FN:
13817 CASE_CFN_ERF:
13818 CASE_CFN_EXPM1:
13819 CASE_CFN_FLOOR:
13820 CASE_CFN_FLOOR_FN:
13821 CASE_CFN_FMOD:
13822 CASE_CFN_FREXP:
13823 CASE_CFN_ICEIL:
13824 CASE_CFN_IFLOOR:
13825 CASE_CFN_IRINT:
13826 CASE_CFN_IROUND:
13827 CASE_CFN_LCEIL:
13828 CASE_CFN_LDEXP:
13829 CASE_CFN_LFLOOR:
13830 CASE_CFN_LLCEIL:
13831 CASE_CFN_LLFLOOR:
13832 CASE_CFN_LLRINT:
13833 CASE_CFN_LLROUND:
13834 CASE_CFN_LRINT:
13835 CASE_CFN_LROUND:
13836 CASE_CFN_MODF:
13837 CASE_CFN_NEARBYINT:
13838 CASE_CFN_NEARBYINT_FN:
13839 CASE_CFN_RINT:
13840 CASE_CFN_RINT_FN:
13841 CASE_CFN_ROUND:
13842 CASE_CFN_ROUND_FN:
13843 CASE_CFN_ROUNDEVEN:
13844 CASE_CFN_ROUNDEVEN_FN:
13845 CASE_CFN_SCALB:
13846 CASE_CFN_SCALBLN:
13847 CASE_CFN_SCALBN:
13848 CASE_CFN_SIGNBIT:
13849 CASE_CFN_SIGNIFICAND:
13850 CASE_CFN_SINH:
13851 CASE_CFN_TANH:
13852 CASE_CFN_TRUNC:
13853 CASE_CFN_TRUNC_FN:
13854 /* True if the 1st argument is nonnegative. */
13855 return RECURSE (arg0);
13856
13857 CASE_CFN_FMAX:
13858 CASE_CFN_FMAX_FN:
13859 /* True if the 1st OR 2nd arguments are nonnegative. */
13860 return RECURSE (arg0) || RECURSE (arg1);
13861
13862 CASE_CFN_FMIN:
13863 CASE_CFN_FMIN_FN:
13864 /* True if the 1st AND 2nd arguments are nonnegative. */
13865 return RECURSE (arg0) && RECURSE (arg1);
13866
13867 CASE_CFN_COPYSIGN:
13868 CASE_CFN_COPYSIGN_FN:
13869 /* True if the 2nd argument is nonnegative. */
13870 return RECURSE (arg1);
13871
13872 CASE_CFN_POWI:
13873 /* True if the 1st argument is nonnegative or the second
13874 argument is an even integer. */
13875 if (TREE_CODE (arg1) == INTEGER_CST
13876 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13877 return true;
13878 return RECURSE (arg0);
13879
13880 CASE_CFN_POW:
13881 /* True if the 1st argument is nonnegative or the second
13882 argument is an even integer valued real. */
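	 /* Sketch: pow (x, 2.0) is known non-negative for any x because 2.0
	 converts to the integer 2, which is even and round-trips through
	 real_from_integer unchanged; pow (x, 3.0) instead falls back to
	 checking whether ARG0 itself is non-negative. */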
13883 if (TREE_CODE (arg1) == REAL_CST)
13884 {
13885 REAL_VALUE_TYPE c;
13886 HOST_WIDE_INT n;
13887
13888 c = TREE_REAL_CST (arg1);
13889 n = real_to_integer (&c);
13890 if ((n & 1) == 0)
13891 {
13892 REAL_VALUE_TYPE cint;
13893 real_from_integer (&cint, VOIDmode, n, SIGNED);
13894 if (real_identical (&c, &cint))
13895 return true;
13896 }
13897 }
13898 return RECURSE (arg0);
13899
13900 default:
13901 break;
13902 }
13903 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13904 }
13905
13906 /* Return true if T is known to be non-negative. If the return
13907 value is based on the assumption that signed overflow is undefined,
13908 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13909 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13910
13911 static bool
13912 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13913 {
13914 enum tree_code code = TREE_CODE (t);
13915 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13916 return true;
13917
13918 switch (code)
13919 {
13920 case TARGET_EXPR:
13921 {
13922 tree temp = TARGET_EXPR_SLOT (t);
13923 t = TARGET_EXPR_INITIAL (t);
13924
13925 /* If the initializer is non-void, then it's a normal expression
13926 that will be assigned to the slot. */
13927 if (!VOID_TYPE_P (t))
13928 return RECURSE (t);
13929
13930 /* Otherwise, the initializer sets the slot in some way. One common
13931 way is an assignment statement at the end of the initializer. */
13932 while (1)
13933 {
13934 if (TREE_CODE (t) == BIND_EXPR)
13935 t = expr_last (BIND_EXPR_BODY (t));
13936 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13937 || TREE_CODE (t) == TRY_CATCH_EXPR)
13938 t = expr_last (TREE_OPERAND (t, 0));
13939 else if (TREE_CODE (t) == STATEMENT_LIST)
13940 t = expr_last (t);
13941 else
13942 break;
13943 }
13944 if (TREE_CODE (t) == MODIFY_EXPR
13945 && TREE_OPERAND (t, 0) == temp)
13946 return RECURSE (TREE_OPERAND (t, 1));
13947
13948 return false;
13949 }
13950
13951 case CALL_EXPR:
13952 {
13953 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13954 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13955
13956 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13957 get_call_combined_fn (t),
13958 arg0,
13959 arg1,
13960 strict_overflow_p, depth);
13961 }
13962 case COMPOUND_EXPR:
13963 case MODIFY_EXPR:
13964 return RECURSE (TREE_OPERAND (t, 1));
13965
13966 case BIND_EXPR:
13967 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13968
13969 case SAVE_EXPR:
13970 return RECURSE (TREE_OPERAND (t, 0));
13971
13972 default:
13973 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13974 }
13975 }
13976
13977 #undef RECURSE
13978 #undef tree_expr_nonnegative_warnv_p
13979
13980 /* Return true if T is known to be non-negative. If the return
13981 value is based on the assumption that signed overflow is undefined,
13982 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13983 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13984
13985 bool
13986 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13987 {
13988 enum tree_code code;
13989 if (t == error_mark_node)
13990 return false;
13991
13992 code = TREE_CODE (t);
13993 switch (TREE_CODE_CLASS (code))
13994 {
13995 case tcc_binary:
13996 case tcc_comparison:
13997 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13998 TREE_TYPE (t),
13999 TREE_OPERAND (t, 0),
14000 TREE_OPERAND (t, 1),
14001 strict_overflow_p, depth);
14002
14003 case tcc_unary:
14004 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14005 TREE_TYPE (t),
14006 TREE_OPERAND (t, 0),
14007 strict_overflow_p, depth);
14008
14009 case tcc_constant:
14010 case tcc_declaration:
14011 case tcc_reference:
14012 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14013
14014 default:
14015 break;
14016 }
14017
14018 switch (code)
14019 {
14020 case TRUTH_AND_EXPR:
14021 case TRUTH_OR_EXPR:
14022 case TRUTH_XOR_EXPR:
14023 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14024 TREE_TYPE (t),
14025 TREE_OPERAND (t, 0),
14026 TREE_OPERAND (t, 1),
14027 strict_overflow_p, depth);
14028 case TRUTH_NOT_EXPR:
14029 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14030 TREE_TYPE (t),
14031 TREE_OPERAND (t, 0),
14032 strict_overflow_p, depth);
14033
14034 case COND_EXPR:
14035 case CONSTRUCTOR:
14036 case OBJ_TYPE_REF:
14037 case ASSERT_EXPR:
14038 case ADDR_EXPR:
14039 case WITH_SIZE_EXPR:
14040 case SSA_NAME:
14041 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14042
14043 default:
14044 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
14045 }
14046 }
14047
14048 /* Return true if `t' is known to be non-negative. Handle warnings
14049 about undefined signed overflow. */
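	 /* Usage sketch: callers that only need the boolean answer use this
	 wrapper; when the answer relied on signed overflow being undefined,
	 fold_overflow_warning arranges for a -Wstrict-overflow note before
	 the result is returned. */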
14050
14051 bool
14052 tree_expr_nonnegative_p (tree t)
14053 {
14054 bool ret, strict_overflow_p;
14055
14056 strict_overflow_p = false;
14057 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14058 if (strict_overflow_p)
14059 fold_overflow_warning (("assuming signed overflow does not occur when "
14060 "determining that expression is always "
14061 "non-negative"),
14062 WARN_STRICT_OVERFLOW_MISC);
14063 return ret;
14064 }
14065
14066
14067 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14068 For floating point we further ensure that T is not denormal.
14069 Similar logic is present in nonzero_address in rtlanal.h.
14070
14071 If the return value is based on the assumption that signed overflow
14072 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14073 change *STRICT_OVERFLOW_P. */
14074
14075 bool
14076 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14077 bool *strict_overflow_p)
14078 {
14079 switch (code)
14080 {
14081 case ABS_EXPR:
14082 return tree_expr_nonzero_warnv_p (op0,
14083 strict_overflow_p);
14084
14085 case NOP_EXPR:
14086 {
14087 tree inner_type = TREE_TYPE (op0);
14088 tree outer_type = type;
14089
14090 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14091 && tree_expr_nonzero_warnv_p (op0,
14092 strict_overflow_p));
14093 }
14094 break;
14095
14096 case NON_LVALUE_EXPR:
14097 return tree_expr_nonzero_warnv_p (op0,
14098 strict_overflow_p);
14099
14100 default:
14101 break;
14102 }
14103
14104 return false;
14105 }
14106
14107 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14108 For floating point we further ensure that T is not denormal.
14109 Similar logic is present in nonzero_address in rtlanal.h.
14110
14111 If the return value is based on the assumption that signed overflow
14112 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14113 change *STRICT_OVERFLOW_P. */
14114
14115 bool
14116 tree_binary_nonzero_warnv_p (enum tree_code code,
14117 tree type,
14118 tree op0,
14119 tree op1, bool *strict_overflow_p)
14120 {
14121 bool sub_strict_overflow_p;
14122 switch (code)
14123 {
14124 case POINTER_PLUS_EXPR:
14125 case PLUS_EXPR:
14126 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14127 {
14128 /* With the presence of negative values it is hard
14129 to say something. */
14130 sub_strict_overflow_p = false;
14131 if (!tree_expr_nonnegative_warnv_p (op0,
14132 &sub_strict_overflow_p)
14133 || !tree_expr_nonnegative_warnv_p (op1,
14134 &sub_strict_overflow_p))
14135 return false;
14136 	 /* One of the operands must be positive and the other non-negative. */
14137 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14138 overflows, on a twos-complement machine the sum of two
14139 nonnegative numbers can never be zero. */
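	 /* Illustrative case: with 32-bit int, INT_MAX + 1 wraps to INT_MIN,
	 which is still nonzero, so the conclusion holds even when the
	 addition overflows. */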
14140 return (tree_expr_nonzero_warnv_p (op0,
14141 strict_overflow_p)
14142 || tree_expr_nonzero_warnv_p (op1,
14143 strict_overflow_p));
14144 }
14145 break;
14146
14147 case MULT_EXPR:
14148 if (TYPE_OVERFLOW_UNDEFINED (type))
14149 {
14150 if (tree_expr_nonzero_warnv_p (op0,
14151 strict_overflow_p)
14152 && tree_expr_nonzero_warnv_p (op1,
14153 strict_overflow_p))
14154 {
14155 *strict_overflow_p = true;
14156 return true;
14157 }
14158 }
14159 break;
14160
14161 case MIN_EXPR:
14162 sub_strict_overflow_p = false;
14163 if (tree_expr_nonzero_warnv_p (op0,
14164 &sub_strict_overflow_p)
14165 && tree_expr_nonzero_warnv_p (op1,
14166 &sub_strict_overflow_p))
14167 {
14168 if (sub_strict_overflow_p)
14169 *strict_overflow_p = true;
14170 }
14171 break;
14172
14173 case MAX_EXPR:
14174 sub_strict_overflow_p = false;
14175 if (tree_expr_nonzero_warnv_p (op0,
14176 &sub_strict_overflow_p))
14177 {
14178 if (sub_strict_overflow_p)
14179 *strict_overflow_p = true;
14180
14181 /* When both operands are nonzero, then MAX must be too. */
14182 if (tree_expr_nonzero_warnv_p (op1,
14183 strict_overflow_p))
14184 return true;
14185
14186 /* MAX where operand 0 is positive is positive. */
14187 return tree_expr_nonnegative_warnv_p (op0,
14188 strict_overflow_p);
14189 }
14190 /* MAX where operand 1 is positive is positive. */
14191 else if (tree_expr_nonzero_warnv_p (op1,
14192 &sub_strict_overflow_p)
14193 && tree_expr_nonnegative_warnv_p (op1,
14194 &sub_strict_overflow_p))
14195 {
14196 if (sub_strict_overflow_p)
14197 *strict_overflow_p = true;
14198 return true;
14199 }
14200 break;
14201
14202 case BIT_IOR_EXPR:
14203 return (tree_expr_nonzero_warnv_p (op1,
14204 strict_overflow_p)
14205 || tree_expr_nonzero_warnv_p (op0,
14206 strict_overflow_p));
14207
14208 default:
14209 break;
14210 }
14211
14212 return false;
14213 }
14214
14215 /* Return true when T is an address and is known to be nonzero.
14216 For floating point we further ensure that T is not denormal.
14217 Similar logic is present in nonzero_address in rtlanal.h.
14218
14219 If the return value is based on the assumption that signed overflow
14220 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14221 change *STRICT_OVERFLOW_P. */
14222
14223 bool
14224 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14225 {
14226 bool sub_strict_overflow_p;
14227 switch (TREE_CODE (t))
14228 {
14229 case INTEGER_CST:
14230 return !integer_zerop (t);
14231
14232 case ADDR_EXPR:
14233 {
14234 tree base = TREE_OPERAND (t, 0);
14235
14236 if (!DECL_P (base))
14237 base = get_base_address (base);
14238
14239 if (base && TREE_CODE (base) == TARGET_EXPR)
14240 base = TARGET_EXPR_SLOT (base);
14241
14242 if (!base)
14243 return false;
14244
14245 /* For objects in symbol table check if we know they are non-zero.
14246 Don't do anything for variables and functions before symtab is built;
14247 it is quite possible that they will be declared weak later. */
14248 int nonzero_addr = maybe_nonzero_address (base);
14249 if (nonzero_addr >= 0)
14250 return nonzero_addr;
14251
14252 /* Constants are never weak. */
14253 if (CONSTANT_CLASS_P (base))
14254 return true;
14255
14256 return false;
14257 }
14258
14259 case COND_EXPR:
14260 sub_strict_overflow_p = false;
14261 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14262 &sub_strict_overflow_p)
14263 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14264 &sub_strict_overflow_p))
14265 {
14266 if (sub_strict_overflow_p)
14267 *strict_overflow_p = true;
14268 return true;
14269 }
14270 break;
14271
14272 case SSA_NAME:
14273 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14274 break;
14275 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
14276
14277 default:
14278 break;
14279 }
14280 return false;
14281 }
14282
14283 #define integer_valued_real_p(X) \
14284 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14285
14286 #define RECURSE(X) \
14287 ((integer_valued_real_p) (X, depth + 1))
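 /* Descriptive note: the _Pragma poisoning above makes a plain call such
 as integer_valued_real_p (op0) a compile-time error inside this block,
 while RECURSE (op0) expands to (integer_valued_real_p) (op0, depth + 1)
 -- the parenthesized name bypasses the function-like macro and threads
 the incremented DEPTH through every recursive query. */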
14288
14289 /* Return true if the floating point result of (CODE OP0) has an
14290 integer value. We also allow +Inf, -Inf and NaN to be considered
14291 integer values. Return false for signaling NaN.
14292
14293 DEPTH is the current nesting depth of the query. */
14294
14295 bool
14296 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
14297 {
14298 switch (code)
14299 {
14300 case FLOAT_EXPR:
14301 return true;
14302
14303 case ABS_EXPR:
14304 return RECURSE (op0);
14305
14306 CASE_CONVERT:
14307 {
14308 tree type = TREE_TYPE (op0);
14309 if (TREE_CODE (type) == INTEGER_TYPE)
14310 return true;
14311 if (TREE_CODE (type) == REAL_TYPE)
14312 return RECURSE (op0);
14313 break;
14314 }
14315
14316 default:
14317 break;
14318 }
14319 return false;
14320 }
14321
14322 /* Return true if the floating point result of (CODE OP0 OP1) has an
14323 integer value. We also allow +Inf, -Inf and NaN to be considered
14324 integer values. Return false for signaling NaN.
14325
14326 DEPTH is the current nesting depth of the query. */
14327
14328 bool
14329 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
14330 {
14331 switch (code)
14332 {
14333 case PLUS_EXPR:
14334 case MINUS_EXPR:
14335 case MULT_EXPR:
14336 case MIN_EXPR:
14337 case MAX_EXPR:
14338 return RECURSE (op0) && RECURSE (op1);
14339
14340 default:
14341 break;
14342 }
14343 return false;
14344 }
14345
14346 /* Return true if the floating point result of calling FN with arguments
14347 	 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
14348 	 considered integer values. Return false for signaling NaN. If FN
14349 	 takes fewer than 2 arguments, the remaining ARGn are null.
14350
14351 DEPTH is the current nesting depth of the query. */
14352
14353 bool
14354 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
14355 {
14356 switch (fn)
14357 {
14358 CASE_CFN_CEIL:
14359 CASE_CFN_CEIL_FN:
14360 CASE_CFN_FLOOR:
14361 CASE_CFN_FLOOR_FN:
14362 CASE_CFN_NEARBYINT:
14363 CASE_CFN_NEARBYINT_FN:
14364 CASE_CFN_RINT:
14365 CASE_CFN_RINT_FN:
14366 CASE_CFN_ROUND:
14367 CASE_CFN_ROUND_FN:
14368 CASE_CFN_ROUNDEVEN:
14369 CASE_CFN_ROUNDEVEN_FN:
14370 CASE_CFN_TRUNC:
14371 CASE_CFN_TRUNC_FN:
14372 return true;
14373
14374 CASE_CFN_FMIN:
14375 CASE_CFN_FMIN_FN:
14376 CASE_CFN_FMAX:
14377 CASE_CFN_FMAX_FN:
14378 return RECURSE (arg0) && RECURSE (arg1);
14379
14380 default:
14381 break;
14382 }
14383 return false;
14384 }
14385
14386 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
14387 has an integer value. We also allow +Inf, -Inf and NaN to be
14388 considered integer values. Return false for signaling NaN.
14389
14390 DEPTH is the current nesting depth of the query. */
14391
14392 bool
14393 integer_valued_real_single_p (tree t, int depth)
14394 {
14395 switch (TREE_CODE (t))
14396 {
14397 case REAL_CST:
14398 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
14399
14400 case COND_EXPR:
14401 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14402
14403 case SSA_NAME:
14404 /* Limit the depth of recursion to avoid quadratic behavior.
14405 This is expected to catch almost all occurrences in practice.
14406 If this code misses important cases that unbounded recursion
14407 would not, passes that need this information could be revised
14408 to provide it through dataflow propagation. */
14409 return (!name_registered_for_update_p (t)
14410 && depth < param_max_ssa_name_query_depth
14411 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
14412 depth));
14413
14414 default:
14415 break;
14416 }
14417 return false;
14418 }
14419
14420 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
14421 has an integer value. We also allow +Inf, -Inf and NaN to be
14422 considered integer values. Return false for signaling NaN.
14423
14424 DEPTH is the current nesting depth of the query. */
14425
14426 static bool
14427 integer_valued_real_invalid_p (tree t, int depth)
14428 {
14429 switch (TREE_CODE (t))
14430 {
14431 case COMPOUND_EXPR:
14432 case MODIFY_EXPR:
14433 case BIND_EXPR:
14434 return RECURSE (TREE_OPERAND (t, 1));
14435
14436 case SAVE_EXPR:
14437 return RECURSE (TREE_OPERAND (t, 0));
14438
14439 default:
14440 break;
14441 }
14442 return false;
14443 }
14444
14445 #undef RECURSE
14446 #undef integer_valued_real_p
14447
14448 /* Return true if the floating point expression T has an integer value.
14449 We also allow +Inf, -Inf and NaN to be considered integer values.
14450 Return false for signaling NaN.
14451
14452 DEPTH is the current nesting depth of the query. */
14453
14454 bool
14455 integer_valued_real_p (tree t, int depth)
14456 {
14457 if (t == error_mark_node)
14458 return false;
14459
14460 STRIP_ANY_LOCATION_WRAPPER (t);
14461
14462 tree_code code = TREE_CODE (t);
14463 switch (TREE_CODE_CLASS (code))
14464 {
14465 case tcc_binary:
14466 case tcc_comparison:
14467 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
14468 TREE_OPERAND (t, 1), depth);
14469
14470 case tcc_unary:
14471 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
14472
14473 case tcc_constant:
14474 case tcc_declaration:
14475 case tcc_reference:
14476 return integer_valued_real_single_p (t, depth);
14477
14478 default:
14479 break;
14480 }
14481
14482 switch (code)
14483 {
14484 case COND_EXPR:
14485 case SSA_NAME:
14486 return integer_valued_real_single_p (t, depth);
14487
14488 case CALL_EXPR:
14489 {
14490 tree arg0 = (call_expr_nargs (t) > 0
14491 ? CALL_EXPR_ARG (t, 0)
14492 : NULL_TREE);
14493 tree arg1 = (call_expr_nargs (t) > 1
14494 ? CALL_EXPR_ARG (t, 1)
14495 : NULL_TREE);
14496 return integer_valued_real_call_p (get_call_combined_fn (t),
14497 arg0, arg1, depth);
14498 }
14499
14500 default:
14501 return integer_valued_real_invalid_p (t, depth);
14502 }
14503 }
14504
14505 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14506 attempt to fold the expression to a constant without modifying TYPE,
14507 OP0 or OP1.
14508
14509 If the expression could be simplified to a constant, then return
14510 the constant. If the expression would not be simplified to a
14511 constant, then return NULL_TREE. */
14512
14513 tree
14514 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14515 {
14516 tree tem = fold_binary (code, type, op0, op1);
14517 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14518 }
14519
14520 /* Given the components of a unary expression CODE, TYPE and OP0,
14521 attempt to fold the expression to a constant without modifying
14522 TYPE or OP0.
14523
14524 If the expression could be simplified to a constant, then return
14525 the constant. If the expression would not be simplified to a
14526 constant, then return NULL_TREE. */
14527
14528 tree
14529 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14530 {
14531 tree tem = fold_unary (code, type, op0);
14532 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14533 }
14534
14535 /* If EXP represents referencing an element in a constant string
14536 (either via pointer arithmetic or array indexing), return the
14537 tree representing the value accessed, otherwise return NULL. */
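 /* Sketch: the ARRAY_REF "abc"[1] folds to the INTEGER_CST 98 ('b') in
 the element type, provided the index is a constant within
 TREE_STRING_LENGTH and the element mode is a single byte. */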
14538
14539 tree
14540 fold_read_from_constant_string (tree exp)
14541 {
14542 if ((TREE_CODE (exp) == INDIRECT_REF
14543 || TREE_CODE (exp) == ARRAY_REF)
14544 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14545 {
14546 tree exp1 = TREE_OPERAND (exp, 0);
14547 tree index;
14548 tree string;
14549 location_t loc = EXPR_LOCATION (exp);
14550
14551 if (TREE_CODE (exp) == INDIRECT_REF)
14552 string = string_constant (exp1, &index, NULL, NULL);
14553 else
14554 {
14555 tree low_bound = array_ref_low_bound (exp);
14556 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14557
14558 /* Optimize the special-case of a zero lower bound.
14559
14560 We convert the low_bound to sizetype to avoid some problems
14561 with constant folding. (E.g. suppose the lower bound is 1,
14562 	 and its mode is QI. Without the conversion, (ARRAY
14563 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14564 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14565 if (! integer_zerop (low_bound))
14566 index = size_diffop_loc (loc, index,
14567 fold_convert_loc (loc, sizetype, low_bound));
14568
14569 string = exp1;
14570 }
14571
14572 scalar_int_mode char_mode;
14573 if (string
14574 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14575 && TREE_CODE (string) == STRING_CST
14576 && TREE_CODE (index) == INTEGER_CST
14577 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14578 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
14579 &char_mode)
14580 && GET_MODE_SIZE (char_mode) == 1)
14581 return build_int_cst_type (TREE_TYPE (exp),
14582 (TREE_STRING_POINTER (string)
14583 [TREE_INT_CST_LOW (index)]));
14584 }
14585 return NULL;
14586 }
14587
14588 /* Folds a read from vector element at IDX of vector ARG. */
14589
14590 tree
14591 fold_read_from_vector (tree arg, poly_uint64 idx)
14592 {
14593 unsigned HOST_WIDE_INT i;
14594 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
14595 && known_ge (idx, 0u)
14596 && idx.is_constant (&i))
14597 {
14598 if (TREE_CODE (arg) == VECTOR_CST)
14599 return VECTOR_CST_ELT (arg, i);
14600 else if (TREE_CODE (arg) == CONSTRUCTOR)
14601 {
14602 if (i >= CONSTRUCTOR_NELTS (arg))
14603 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
14604 return CONSTRUCTOR_ELT (arg, i)->value;
14605 }
14606 }
14607 return NULL_TREE;
14608 }
14609
14610 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14611 an integer constant, real, or fixed-point constant.
14612
14613 TYPE is the type of the result. */
14614
14615 static tree
14616 fold_negate_const (tree arg0, tree type)
14617 {
14618 tree t = NULL_TREE;
14619
14620 switch (TREE_CODE (arg0))
14621 {
14622 case REAL_CST:
14623 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14624 break;
14625
14626 case FIXED_CST:
14627 {
14628 FIXED_VALUE_TYPE f;
14629 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14630 &(TREE_FIXED_CST (arg0)), NULL,
14631 TYPE_SATURATING (type));
14632 t = build_fixed (type, f);
14633 /* Propagate overflow flags. */
14634 if (overflow_p | TREE_OVERFLOW (arg0))
14635 TREE_OVERFLOW (t) = 1;
14636 break;
14637 }
14638
14639 default:
14640 if (poly_int_tree_p (arg0))
14641 {
14642 wi::overflow_type overflow;
14643 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
14644 t = force_fit_type (type, res, 1,
14645 (overflow && ! TYPE_UNSIGNED (type))
14646 || TREE_OVERFLOW (arg0));
14647 break;
14648 }
14649
14650 gcc_unreachable ();
14651 }
14652
14653 return t;
14654 }
14655
14656 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14657 an integer constant or real constant.
14658
14659 TYPE is the type of the result. */
14660
14661 tree
14662 fold_abs_const (tree arg0, tree type)
14663 {
14664 tree t = NULL_TREE;
14665
14666 switch (TREE_CODE (arg0))
14667 {
14668 case INTEGER_CST:
14669 {
14670 /* If the value is unsigned or non-negative, then the absolute value
14671 is the same as the ordinary value. */
14672 wide_int val = wi::to_wide (arg0);
14673 wi::overflow_type overflow = wi::OVF_NONE;
14674 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
14675 ;
14676
14677 /* If the value is negative, then the absolute value is
14678 its negation. */
14679 else
14680 val = wi::neg (val, &overflow);
14681
14682 /* Force to the destination type, set TREE_OVERFLOW for signed
14683 TYPE only. */
14684 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
14685 }
14686 break;
14687
14688 case REAL_CST:
14689 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14690 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14691 else
14692 t = arg0;
14693 break;
14694
14695 default:
14696 gcc_unreachable ();
14697 }
14698
14699 return t;
14700 }
14701
14702 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14703 constant. TYPE is the type of the result. */
14704
14705 static tree
14706 fold_not_const (const_tree arg0, tree type)
14707 {
14708 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14709
14710 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
14711 }
14712
14713 /* Given CODE, a relational operator, the target type, TYPE and two
14714 constant operands OP0 and OP1, return the result of the
14715 relational operation. If the result is not a compile time
14716 constant, then return NULL_TREE. */
14717
14718 static tree
14719 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14720 {
14721 int result, invert;
14722
14723 /* From here on, the only cases we handle are when the result is
14724 known to be a constant. */
14725
14726 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14727 {
14728 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14729 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14730
14731 /* Handle the cases where either operand is a NaN. */
14732 if (real_isnan (c0) || real_isnan (c1))
14733 {
14734 switch (code)
14735 {
14736 case EQ_EXPR:
14737 case ORDERED_EXPR:
14738 result = 0;
14739 break;
14740
14741 case NE_EXPR:
14742 case UNORDERED_EXPR:
14743 case UNLT_EXPR:
14744 case UNLE_EXPR:
14745 case UNGT_EXPR:
14746 case UNGE_EXPR:
14747 case UNEQ_EXPR:
14748 result = 1;
14749 break;
14750
14751 case LT_EXPR:
14752 case LE_EXPR:
14753 case GT_EXPR:
14754 case GE_EXPR:
14755 case LTGT_EXPR:
14756 if (flag_trapping_math)
14757 return NULL_TREE;
14758 result = 0;
14759 break;
14760
14761 default:
14762 gcc_unreachable ();
14763 }
14764
14765 return constant_boolean_node (result, type);
14766 }
14767
14768 return constant_boolean_node (real_compare (code, c0, c1), type);
14769 }
14770
14771 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14772 {
14773 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14774 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14775 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14776 }
14777
14778 /* Handle equality/inequality of complex constants. */
14779 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14780 {
14781 tree rcond = fold_relational_const (code, type,
14782 TREE_REALPART (op0),
14783 TREE_REALPART (op1));
14784 tree icond = fold_relational_const (code, type,
14785 TREE_IMAGPART (op0),
14786 TREE_IMAGPART (op1));
14787 if (code == EQ_EXPR)
14788 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14789 else if (code == NE_EXPR)
14790 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14791 else
14792 return NULL_TREE;
14793 }
14794
14795 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14796 {
14797 if (!VECTOR_TYPE_P (type))
14798 {
14799 /* Have vector comparison with scalar boolean result. */
14800 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14801 && known_eq (VECTOR_CST_NELTS (op0),
14802 VECTOR_CST_NELTS (op1)));
14803 unsigned HOST_WIDE_INT nunits;
14804 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14805 return NULL_TREE;
14806 for (unsigned i = 0; i < nunits; i++)
14807 {
14808 tree elem0 = VECTOR_CST_ELT (op0, i);
14809 tree elem1 = VECTOR_CST_ELT (op1, i);
14810 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
14811 if (tmp == NULL_TREE)
14812 return NULL_TREE;
14813 if (integer_zerop (tmp))
14814 return constant_boolean_node (code == NE_EXPR, type);
14815 }
14816 return constant_boolean_node (code == EQ_EXPR, type);
14817 }
14818 tree_vector_builder elts;
14819 if (!elts.new_binary_operation (type, op0, op1, false))
14820 return NULL_TREE;
14821 unsigned int count = elts.encoded_nelts ();
14822 for (unsigned i = 0; i < count; i++)
14823 {
14824 tree elem_type = TREE_TYPE (type);
14825 tree elem0 = VECTOR_CST_ELT (op0, i);
14826 tree elem1 = VECTOR_CST_ELT (op1, i);
14827
14828 tree tem = fold_relational_const (code, elem_type,
14829 elem0, elem1);
14830
14831 if (tem == NULL_TREE)
14832 return NULL_TREE;
14833
14834 elts.quick_push (build_int_cst (elem_type,
14835 integer_zerop (tem) ? 0 : -1));
14836 }
14837
14838 return elts.build ();
14839 }
14840
14841 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14842
14843 To compute GT, swap the arguments and do LT.
14844 To compute GE, do LT and invert the result.
14845 To compute LE, swap the arguments, do LT and invert the result.
14846 To compute NE, do EQ and invert the result.
14847
14848 Therefore, the code below must handle only EQ and LT. */
14849
14850 if (code == LE_EXPR || code == GT_EXPR)
14851 {
14852 std::swap (op0, op1);
14853 code = swap_tree_comparison (code);
14854 }
14855
14856 /* Note that it is safe to invert for real values here because we
14857 have already handled the one case that it matters. */
14858
14859 invert = 0;
14860 if (code == NE_EXPR || code == GE_EXPR)
14861 {
14862 invert = 1;
14863 code = invert_tree_comparison (code, false);
14864 }
14865
14866 /* Compute a result for LT or EQ if args permit;
14867 	 otherwise return NULL_TREE. */
14868 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14869 {
14870 if (code == EQ_EXPR)
14871 result = tree_int_cst_equal (op0, op1);
14872 else
14873 result = tree_int_cst_lt (op0, op1);
14874 }
14875 else
14876 return NULL_TREE;
14877
14878 if (invert)
14879 result ^= 1;
14880 return constant_boolean_node (result, type);
14881 }
14882
14883 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14884 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14885 itself. */
14886
14887 tree
14888 fold_build_cleanup_point_expr (tree type, tree expr)
14889 {
14890 /* If the expression does not have side effects then we don't have to wrap
14891 it with a cleanup point expression. */
14892 if (!TREE_SIDE_EFFECTS (expr))
14893 return expr;
14894
14895 	 /* If the expression is a return, check whether the expression inside the
14896 	 return, or the right-hand side of the MODIFY_EXPR inside the return, has
14897 	 no side effects. If either one is side-effect free, we don't need to
14898 	 wrap the expression in a cleanup point expression. Note we don't check the
14899 left hand side of the modify because it should always be a return decl. */
14900 if (TREE_CODE (expr) == RETURN_EXPR)
14901 {
14902 tree op = TREE_OPERAND (expr, 0);
14903 if (!op || !TREE_SIDE_EFFECTS (op))
14904 return expr;
14905 op = TREE_OPERAND (op, 1);
14906 if (!TREE_SIDE_EFFECTS (op))
14907 return expr;
14908 }
14909
14910 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14911 }
14912
14913 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14914 of an indirection through OP0, or NULL_TREE if no simplification is
14915 possible. */
14916
14917 tree
14918 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14919 {
14920 tree sub = op0;
14921 tree subtype;
14922 poly_uint64 const_op01;
14923
14924 STRIP_NOPS (sub);
14925 subtype = TREE_TYPE (sub);
14926 if (!POINTER_TYPE_P (subtype)
14927 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14928 return NULL_TREE;
14929
14930 if (TREE_CODE (sub) == ADDR_EXPR)
14931 {
14932 tree op = TREE_OPERAND (sub, 0);
14933 tree optype = TREE_TYPE (op);
14934
14935 /* *&CONST_DECL -> to the value of the const decl. */
14936 if (TREE_CODE (op) == CONST_DECL)
14937 return DECL_INITIAL (op);
14938 /* *&p => p; make sure to handle *&"str"[cst] here. */
14939 if (type == optype)
14940 {
14941 tree fop = fold_read_from_constant_string (op);
14942 if (fop)
14943 return fop;
14944 else
14945 return op;
14946 }
14947 /* *(foo *)&fooarray => fooarray[0] */
14948 else if (TREE_CODE (optype) == ARRAY_TYPE
14949 && type == TREE_TYPE (optype)
14950 && (!in_gimple_form
14951 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14952 {
14953 tree type_domain = TYPE_DOMAIN (optype);
14954 tree min_val = size_zero_node;
14955 if (type_domain && TYPE_MIN_VALUE (type_domain))
14956 min_val = TYPE_MIN_VALUE (type_domain);
14957 if (in_gimple_form
14958 && TREE_CODE (min_val) != INTEGER_CST)
14959 return NULL_TREE;
14960 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14961 NULL_TREE, NULL_TREE);
14962 }
14963 /* *(foo *)&complexfoo => __real__ complexfoo */
14964 else if (TREE_CODE (optype) == COMPLEX_TYPE
14965 && type == TREE_TYPE (optype))
14966 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14967 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14968 else if (VECTOR_TYPE_P (optype)
14969 && type == TREE_TYPE (optype))
14970 {
14971 tree part_width = TYPE_SIZE (type);
14972 tree index = bitsize_int (0);
14973 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14974 index);
14975 }
14976 }
14977
14978 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14979 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
14980 {
14981 tree op00 = TREE_OPERAND (sub, 0);
14982 tree op01 = TREE_OPERAND (sub, 1);
14983
14984 STRIP_NOPS (op00);
14985 if (TREE_CODE (op00) == ADDR_EXPR)
14986 {
14987 tree op00type;
14988 op00 = TREE_OPERAND (op00, 0);
14989 op00type = TREE_TYPE (op00);
14990
14991 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14992 if (VECTOR_TYPE_P (op00type)
14993 && type == TREE_TYPE (op00type)
14994 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
14995 but we want to treat offsets with MSB set as negative.
14996 For the code below negative offsets are invalid and
14997 TYPE_SIZE of the element is something unsigned, so
14998 check whether op01 fits into poly_int64, which implies
14999 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15000 then just use poly_uint64 because we want to treat the
15001 value as unsigned. */
15002 && tree_fits_poly_int64_p (op01))
15003 {
15004 tree part_width = TYPE_SIZE (type);
15005 poly_uint64 max_offset
15006 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
15007 * TYPE_VECTOR_SUBPARTS (op00type));
15008 if (known_lt (const_op01, max_offset))
15009 {
15010 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
15011 return fold_build3_loc (loc,
15012 BIT_FIELD_REF, type, op00,
15013 part_width, index);
15014 }
15015 }
15016 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15017 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15018 && type == TREE_TYPE (op00type))
15019 {
15020 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
15021 const_op01))
15022 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15023 }
15024 /* ((foo *)&fooarray)[1] => fooarray[1] */
15025 else if (TREE_CODE (op00type) == ARRAY_TYPE
15026 && type == TREE_TYPE (op00type))
15027 {
15028 tree type_domain = TYPE_DOMAIN (op00type);
15029 tree min_val = size_zero_node;
15030 if (type_domain && TYPE_MIN_VALUE (type_domain))
15031 min_val = TYPE_MIN_VALUE (type_domain);
15032 poly_uint64 type_size, index;
15033 if (poly_int_tree_p (min_val)
15034 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
15035 && multiple_p (const_op01, type_size, &index))
15036 {
15037 poly_offset_int off = index + wi::to_poly_offset (min_val);
15038 op01 = wide_int_to_tree (sizetype, off);
15039 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15040 NULL_TREE, NULL_TREE);
15041 }
15042 }
15043 }
15044 }
15045
15046 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15047 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15048 && type == TREE_TYPE (TREE_TYPE (subtype))
15049 && (!in_gimple_form
15050 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15051 {
15052 tree type_domain;
15053 tree min_val = size_zero_node;
15054 sub = build_fold_indirect_ref_loc (loc, sub);
15055 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15056 if (type_domain && TYPE_MIN_VALUE (type_domain))
15057 min_val = TYPE_MIN_VALUE (type_domain);
15058 if (in_gimple_form
15059 && TREE_CODE (min_val) != INTEGER_CST)
15060 return NULL_TREE;
15061 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15062 NULL_TREE);
15063 }
15064
15065 return NULL_TREE;
15066 }
15067
15068 /* Builds an expression for an indirection through T, simplifying some
15069 cases. */
15070
15071 tree
15072 build_fold_indirect_ref_loc (location_t loc, tree t)
15073 {
15074 tree type = TREE_TYPE (TREE_TYPE (t));
15075 tree sub = fold_indirect_ref_1 (loc, type, t);
15076
15077 if (sub)
15078 return sub;
15079
15080 return build1_loc (loc, INDIRECT_REF, type, t);
15081 }
15082
15083 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15084
15085 tree
15086 fold_indirect_ref_loc (location_t loc, tree t)
15087 {
15088 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15089
15090 if (sub)
15091 return sub;
15092 else
15093 return t;
15094 }
15095
15096 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15097 whose result is ignored. The type of the returned tree need not be
15098 the same as the original expression. */
15099
15100 tree
15101 fold_ignored_result (tree t)
15102 {
15103 if (!TREE_SIDE_EFFECTS (t))
15104 return integer_zero_node;
15105
15106 for (;;)
15107 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15108 {
15109 case tcc_unary:
15110 t = TREE_OPERAND (t, 0);
15111 break;
15112
15113 case tcc_binary:
15114 case tcc_comparison:
15115 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15116 t = TREE_OPERAND (t, 0);
15117 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15118 t = TREE_OPERAND (t, 1);
15119 else
15120 return t;
15121 break;
15122
15123 case tcc_expression:
15124 switch (TREE_CODE (t))
15125 {
15126 case COMPOUND_EXPR:
15127 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15128 return t;
15129 t = TREE_OPERAND (t, 0);
15130 break;
15131
15132 case COND_EXPR:
15133 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15134 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15135 return t;
15136 t = TREE_OPERAND (t, 0);
15137 break;
15138
15139 default:
15140 return t;
15141 }
15142 break;
15143
15144 default:
15145 return t;
15146 }
15147 }
15148
15149 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15150
15151 tree
15152 round_up_loc (location_t loc, tree value, unsigned int divisor)
15153 {
15154 tree div = NULL_TREE;
15155
15156 if (divisor == 1)
15157 return value;
15158
15159 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15160 have to do anything. Only do this when we are not given a const,
15161 because in that case, this check is more expensive than just
15162 doing it. */
15163 if (TREE_CODE (value) != INTEGER_CST)
15164 {
15165 div = build_int_cst (TREE_TYPE (value), divisor);
15166
15167 if (multiple_of_p (TREE_TYPE (value), value, div))
15168 return value;
15169 }
15170
15171 /* If divisor is a power of two, simplify this to bit manipulation. */
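 	 /* Illustrative sketch for DIVISOR == 8: the code below computes
 	 (VALUE + 7) & ~7, so 13 rounds up to 16 and 16 stays 16; the
 	 overflow flag is set when the masked result wraps to zero. */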
15172 if (pow2_or_zerop (divisor))
15173 {
15174 if (TREE_CODE (value) == INTEGER_CST)
15175 {
15176 wide_int val = wi::to_wide (value);
15177 bool overflow_p;
15178
15179 if ((val & (divisor - 1)) == 0)
15180 return value;
15181
15182 overflow_p = TREE_OVERFLOW (value);
15183 val += divisor - 1;
15184 val &= (int) -divisor;
15185 if (val == 0)
15186 overflow_p = true;
15187
15188 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15189 }
15190 else
15191 {
15192 tree t;
15193
15194 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15195 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15196 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
15197 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15198 }
15199 }
15200 else
15201 {
15202 if (!div)
15203 div = build_int_cst (TREE_TYPE (value), divisor);
15204 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15205 value = size_binop_loc (loc, MULT_EXPR, value, div);
15206 }
15207
15208 return value;
15209 }
15210
15211 /* Likewise, but round down. */
15212
15213 tree
15214 round_down_loc (location_t loc, tree value, int divisor)
15215 {
15216 tree div = NULL_TREE;
15217
15218 gcc_assert (divisor > 0);
15219 if (divisor == 1)
15220 return value;
15221
15222 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15223 have to do anything. Only do this when we are not given a const,
15224 because in that case, this check is more expensive than just
15225 doing it. */
15226 if (TREE_CODE (value) != INTEGER_CST)
15227 {
15228 div = build_int_cst (TREE_TYPE (value), divisor);
15229
15230 if (multiple_of_p (TREE_TYPE (value), value, div))
15231 return value;
15232 }
15233
15234 /* If divisor is a power of two, simplify this to bit manipulation. */
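 	 /* Sketch for DIVISOR == 8: this reduces to VALUE & ~7, so 13 rounds
 	 down to 8 and 16 stays 16. */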
15235 if (pow2_or_zerop (divisor))
15236 {
15237 tree t;
15238
15239 t = build_int_cst (TREE_TYPE (value), -divisor);
15240 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15241 }
15242 else
15243 {
15244 if (!div)
15245 div = build_int_cst (TREE_TYPE (value), divisor);
15246 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15247 value = size_binop_loc (loc, MULT_EXPR, value, div);
15248 }
15249
15250 return value;
15251 }
15252
15253 /* Returns the pointer to the base of the object addressed by EXP and
15254 extracts the information about the offset of the access, storing it
15255 to PBITPOS and POFFSET. */
15256
15257 static tree
15258 split_address_to_core_and_offset (tree exp,
15259 poly_int64_pod *pbitpos, tree *poffset)
15260 {
15261 tree core;
15262 machine_mode mode;
15263 int unsignedp, reversep, volatilep;
15264 poly_int64 bitsize;
15265 location_t loc = EXPR_LOCATION (exp);
15266
15267 if (TREE_CODE (exp) == ADDR_EXPR)
15268 {
15269 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15270 poffset, &mode, &unsignedp, &reversep,
15271 &volatilep);
15272 core = build_fold_addr_expr_loc (loc, core);
15273 }
15274 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
15275 {
15276 core = TREE_OPERAND (exp, 0);
15277 STRIP_NOPS (core);
15278 *pbitpos = 0;
15279 *poffset = TREE_OPERAND (exp, 1);
15280 if (poly_int_tree_p (*poffset))
15281 {
15282 poly_offset_int tem
15283 = wi::sext (wi::to_poly_offset (*poffset),
15284 TYPE_PRECISION (TREE_TYPE (*poffset)));
15285 tem <<= LOG2_BITS_PER_UNIT;
15286 if (tem.to_shwi (pbitpos))
15287 *poffset = NULL_TREE;
15288 }
15289 }
15290 else
15291 {
15292 core = exp;
15293 *pbitpos = 0;
15294 *poffset = NULL_TREE;
15295 }
15296
15297 return core;
15298 }
15299
15300 /* Returns true if addresses of E1 and E2 differ by a constant, false
15301 otherwise. If they do, E1 - E2 is stored in *DIFF. */
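 /* Illustrative example, assuming a 4-byte int: for int a[10], the
 addresses &a[7] and &a[2] share the core &a with byte offsets 28 and 8,
 so *DIFF is set to 20 and true is returned; &a[i] versus &b[0] fails the
 operand_equal_p check on the cores and yields false. */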
15302
15303 bool
15304 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
15305 {
15306 tree core1, core2;
15307 poly_int64 bitpos1, bitpos2;
15308 tree toffset1, toffset2, tdiff, type;
15309
15310 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15311 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15312
15313 poly_int64 bytepos1, bytepos2;
15314 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
15315 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
15316 || !operand_equal_p (core1, core2, 0))
15317 return false;
15318
15319 if (toffset1 && toffset2)
15320 {
15321 type = TREE_TYPE (toffset1);
15322 if (type != TREE_TYPE (toffset2))
15323 toffset2 = fold_convert (type, toffset2);
15324
15325 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15326 if (!cst_and_fits_in_hwi (tdiff))
15327 return false;
15328
15329 *diff = int_cst_value (tdiff);
15330 }
15331 else if (toffset1 || toffset2)
15332 {
15333 /* If only one of the offsets is non-constant, the difference cannot
15334 be a constant. */
15335 return false;
15336 }
15337 else
15338 *diff = 0;
15339
15340 *diff += bytepos1 - bytepos2;
15341 return true;
15342 }
15343
15344 /* Return OFF converted to a pointer offset type suitable as offset for
15345 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15346 tree
15347 convert_to_ptrofftype_loc (location_t loc, tree off)
15348 {
15349 return fold_convert_loc (loc, sizetype, off);
15350 }
15351
15352 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15353 tree
15354 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
15355 {
15356 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15357 ptr, convert_to_ptrofftype_loc (loc, off));
15358 }
15359
15360 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15361 tree
15362 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
15363 {
15364 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15365 ptr, size_int (off));
15366 }
15367
15368 /* Return a pointer P to a NUL-terminated string representing the sequence
15369 of constant characters referred to by SRC (or a subsequence of such
15370 characters within it if SRC is a reference to a string plus some
15371 constant offset). If STRLEN is non-null, store the number of bytes
15372 in the string constant including the terminating NUL char. *STRLEN is
15373 typically strlen(P) + 1 in the absence of embedded NUL characters. */
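 /* Usage sketch (hypothetical variables): for a tree SRC referring to
 &"hello"[2],
 unsigned HOST_WIDE_INT len;
 const char *p = c_getstr (src, &len);
 yields P pointing at "llo" with LEN == 4, i.e. the tail of the literal
 including its terminating NUL. */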
15374
15375 const char *
15376 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen /* = NULL */)
15377 {
15378 tree offset_node;
15379 tree mem_size;
15380
15381 if (strlen)
15382 *strlen = 0;
15383
15384 src = string_constant (src, &offset_node, &mem_size, NULL);
15385 if (src == 0)
15386 return NULL;
15387
15388 unsigned HOST_WIDE_INT offset = 0;
15389 if (offset_node != NULL_TREE)
15390 {
15391 if (!tree_fits_uhwi_p (offset_node))
15392 return NULL;
15393 else
15394 offset = tree_to_uhwi (offset_node);
15395 }
15396
15397 if (!tree_fits_uhwi_p (mem_size))
15398 return NULL;
15399
15400 /* STRING_LENGTH is the size of the string literal, including any
15401 embedded NULs. STRING_SIZE is the size of the array the string
15402 literal is stored in. */
15403 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
15404 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size);
15405
15406 /* Ideally this would turn into a gcc_checking_assert over time. */
15407 if (string_length > string_size)
15408 string_length = string_size;
15409
15410 const char *string = TREE_STRING_POINTER (src);
15411
15416 if (string_length == 0
15417 || offset >= string_size)
15418 return NULL;
15419
15420 if (strlen)
15421 {
15422 /* Compute and store the length of the substring at OFFSET.
15423 All offsets past the initial length refer to null strings. */
15424 if (offset < string_length)
15425 *strlen = string_length - offset;
15426 else
15427 *strlen = 1;
15428 }
15429 else
15430 {
15431 tree eltype = TREE_TYPE (TREE_TYPE (src));
15432 /* Support only properly NUL-terminated single byte strings. */
15433 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
15434 return NULL;
15435 if (string[string_length - 1] != '\0')
15436 return NULL;
15437 }
15438
15439 return offset < string_length ? string + offset : "";
15440 }
15441
15442 /* Given a tree T, compute which bits in T may be nonzero. */
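 /* Illustrative example: if X has an unknown 32-bit value, then for
 (X & 0xf0) | 0x3 the result is 0xf3 -- the AND can only leave bits 4-7
 set and the IOR can additionally set bits 0-1, while every other bit is
 known to be zero. Unknown leaves conservatively report all bits as
 possibly nonzero. */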
15443
15444 wide_int
15445 tree_nonzero_bits (const_tree t)
15446 {
15447 switch (TREE_CODE (t))
15448 {
15449 case INTEGER_CST:
15450 return wi::to_wide (t);
15451 case SSA_NAME:
15452 return get_nonzero_bits (t);
15453 case NON_LVALUE_EXPR:
15454 case SAVE_EXPR:
15455 return tree_nonzero_bits (TREE_OPERAND (t, 0));
15456 case BIT_AND_EXPR:
15457 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15458 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15459 case BIT_IOR_EXPR:
15460 case BIT_XOR_EXPR:
15461 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15462 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15463 case COND_EXPR:
15464 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
15465 tree_nonzero_bits (TREE_OPERAND (t, 2)));
15466 CASE_CONVERT:
15467 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15468 TYPE_PRECISION (TREE_TYPE (t)),
15469 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
15470 case PLUS_EXPR:
15471 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
15472 {
15473 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
15474 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
15475 if (wi::bit_and (nzbits1, nzbits2) == 0)
15476 return wi::bit_or (nzbits1, nzbits2);
15477 }
15478 break;
15479 case LSHIFT_EXPR:
15480 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15481 {
15482 tree type = TREE_TYPE (t);
15483 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15484 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15485 TYPE_PRECISION (type));
15486 return wi::neg_p (arg1)
15487 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
15488 : wi::lshift (nzbits, arg1);
15489 }
15490 break;
15491 case RSHIFT_EXPR:
15492 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15493 {
15494 tree type = TREE_TYPE (t);
15495 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15496 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15497 TYPE_PRECISION (type));
15498 return wi::neg_p (arg1)
15499 ? wi::lshift (nzbits, -arg1)
15500 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
15501 }
15502 break;
15503 default:
15504 break;
15505 }
15506
15507 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
15508 }
15509
15510 #if CHECKING_P
15511
15512 namespace selftest {
15513
15514 /* Helper functions for writing tests of folding trees. */
15515
15516 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
15517
15518 static void
15519 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
15520 tree constant)
15521 {
15522 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
15523 }
15524
15525 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
15526 wrapping WRAPPED_EXPR. */
15527
15528 static void
15529 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
15530 tree wrapped_expr)
15531 {
15532 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
15533 ASSERT_NE (wrapped_expr, result);
15534 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
15535 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
15536 }
15537
15538 /* Verify that various arithmetic binary operations are folded
15539 correctly. */
15540
15541 static void
15542 test_arithmetic_folding ()
15543 {
15544 tree type = integer_type_node;
15545 tree x = create_tmp_var_raw (type, "x");
15546 tree zero = build_zero_cst (type);
15547 tree one = build_int_cst (type, 1);
15548
15549 /* Addition. */
15550 /* 1 <-- (0 + 1) */
15551 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
15552 one);
15553 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
15554 one);
15555
15556 /* (nonlvalue)x <-- (x + 0) */
15557 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
15558 x);
15559
15560 /* Subtraction. */
15561 /* 0 <-- (x - x) */
15562 assert_binop_folds_to_const (x, MINUS_EXPR, x,
15563 zero);
15564 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
15565 x);
15566
15567 /* Multiplication. */
15568 /* 0 <-- (x * 0) */
15569 assert_binop_folds_to_const (x, MULT_EXPR, zero,
15570 zero);
15571
15572 /* (nonlvalue)x <-- (x * 1) */
15573 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
15574 x);
15575 }
15576
15577 /* Verify that various binary operations on vectors are folded
15578 correctly. */
15579
15580 static void
15581 test_vector_folding ()
15582 {
15583 tree inner_type = integer_type_node;
15584 tree type = build_vector_type (inner_type, 4);
15585 tree zero = build_zero_cst (type);
15586 tree one = build_one_cst (type);
15587 tree index = build_index_vector (type, 0, 1);
15588
15589 /* Verify equality tests that return a scalar boolean result. */
15590 tree res_type = boolean_type_node;
15591 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
15592 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
15593 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
15594 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
15595 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
15596 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15597 index, one)));
15598 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
15599 index, index)));
15600 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15601 index, index)));
15602 }
15603
15604 /* Verify folding of VEC_DUPLICATE_EXPRs. */
15605
15606 static void
15607 test_vec_duplicate_folding ()
15608 {
15609 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
15610 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
15611 /* This will be 1 if VEC_MODE isn't a vector mode. */
15612 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
15613
15614 tree type = build_vector_type (ssizetype, nunits);
15615 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
15616 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
15617 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
15618 }
15619
15620 /* Run all of the selftests within this file. */
15621
15622 void
15623 fold_const_c_tests ()
15624 {
15625 test_arithmetic_folding ();
15626 test_vector_folding ();
15627 test_vec_duplicate_folding ();
15628 }
15629
15630 } // namespace selftest
15631
15632 #endif /* CHECKING_P */