/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
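
/* Illustrative note: the encoding above dedicates one bit each to the
   primitive outcomes LT (1), EQ (2), GT (4) and UNORD (8), so every
   compound operator is the bitwise OR of its outcomes.  For example,
   COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) == 3 and
   COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD) == 13.
   That is what lets a test like "a < b || a == b" be combined by
   OR-ing the two codes and mapping the result back through
   compcode_to_comparison.  */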

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
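
/* Usage sketch, for illustration:

     div_if_zero_remainder (build_int_cst (integer_type_node, 12),
			    build_int_cst (integer_type_node, 4))

   returns the INTEGER_CST 3, while dividing 12 by 5 would return
   NULL_TREE because the remainder is nonzero.  */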
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
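
/* Typical usage pattern, for illustration (IS_USED is a hypothetical
   stand-in for the caller's own "result was kept" test):

     fold_defer_overflow_warnings ();
     tem = fold (expr);
     fold_undefer_overflow_warnings (tem && is_used, stmt, 0);

   so a deferred -Wstrict-overflow warning is only emitted when the
   folded result is actually used.  */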
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
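
/* For illustration: sin is odd, so -sin(x) == sin(-x) and the negation
   can be moved into the argument.  rint and friends are odd only under
   round-to-nearest; with -frounding-math a directed rounding mode
   (e.g. toward +infinity) makes rint(-x) differ from -rint(x), hence
   the flag_rounding_math check above.  */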

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
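
/* For illustration, assuming a 32-bit int: every value except INT_MIN
   can be negated without overflow; INT_MIN is exactly the value with
   only the sign bit set, which is what wi::only_sign_bit_p detects.  */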

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
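
/* Two of the folds above, for illustration: for integral types
   -(~a) becomes a + 1 (BIT_NOT_EXPR case), and -(a - b) becomes
   b - a when neither signed zeros nor sign-dependent rounding have
   to be honored (MINUS_EXPR case).  */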

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could
   be combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except when it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
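
/* Worked example, for illustration: splitting IN = x - 4 with
   CODE == PLUS_EXPR stores the subtracted literal 4 in *MINUS_LITP,
   leaves *CONP and *LITP null, and returns the variable part x.
   The pieces can then be recombined with associate_trees below.  */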

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
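
/* Note, for illustration: int_const_binop below passes OVERFLOWABLE
   as 1, so a signed result that wrapped is still returned but carries
   TREE_OVERFLOW (see force_fit_type above), while unsigned arithmetic
   simply wraps without being flagged.  */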

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ...  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
	  || code == VEC_RSHIFT_EXPR)
	{
	  if (!tree_fits_uhwi_p (arg2))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
	  unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
	  unsigned HOST_WIDE_INT innerc
	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;
	  /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
	     For reductions, the compiler always emits VEC_RSHIFT_EXPR;
	     for !BYTES_BIG_ENDIAN it picks the first vector element, but
	     for BYTES_BIG_ENDIAN the last element from the vector.  */
	  if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
	    offset = -offset;
	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
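
/* For illustration: with unsigned sizetype constants ARG0 = 2 and
   ARG1 = 5, the final branch above computes 0 - (5 - 2) in ssizetype
   and returns -3, where a plain unsigned subtraction would have
   wrapped to a huge positive value.  */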
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification:
     IEEE NaNs are mapped to zero and values that overflow the target
     precision saturate, i.e. values greater than INT_MAX are mapped
     to INT_MAX, and values less than INT_MIN are mapped to INT_MIN.
     These semantics are allowed by the C and C++ standards, which
     simply state that the behavior of FP-to-integer conversion is
     unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
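
/* For illustration, assuming a 32-bit signed target type: converting
   the REAL_CST 1e30 this way saturates to INT_MAX with TREE_OVERFLOW
   set on the result, and converting a NaN yields 0, likewise flagged
   as an overflow.  */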

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do that by adding 1 to TEMP when the discarded fractional bits
     are nonzero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
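
/* For illustration: a signed fixed-point -1.5 arithmetic-right-shifts
   to -2 (rounding toward negative infinity); because fractional bits
   were discarded, the correction above adds 1 and produces the
   truncated-toward-zero result -1.  */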

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

1818 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1819 type TYPE. If no simplification can be done return NULL_TREE. */
1820
1821 static tree
1822 fold_convert_const (enum tree_code code, tree type, tree arg1)
1823 {
1824 if (TREE_TYPE (arg1) == type)
1825 return arg1;
1826
1827 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1828 || TREE_CODE (type) == OFFSET_TYPE)
1829 {
1830 if (TREE_CODE (arg1) == INTEGER_CST)
1831 return fold_convert_const_int_from_int (type, arg1);
1832 else if (TREE_CODE (arg1) == REAL_CST)
1833 return fold_convert_const_int_from_real (code, type, arg1);
1834 else if (TREE_CODE (arg1) == FIXED_CST)
1835 return fold_convert_const_int_from_fixed (type, arg1);
1836 }
1837 else if (TREE_CODE (type) == REAL_TYPE)
1838 {
1839 if (TREE_CODE (arg1) == INTEGER_CST)
1840 return build_real_from_int_cst (type, arg1);
1841 else if (TREE_CODE (arg1) == REAL_CST)
1842 return fold_convert_const_real_from_real (type, arg1);
1843 else if (TREE_CODE (arg1) == FIXED_CST)
1844 return fold_convert_const_real_from_fixed (type, arg1);
1845 }
1846 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1847 {
1848 if (TREE_CODE (arg1) == FIXED_CST)
1849 return fold_convert_const_fixed_from_fixed (type, arg1);
1850 else if (TREE_CODE (arg1) == INTEGER_CST)
1851 return fold_convert_const_fixed_from_int (type, arg1);
1852 else if (TREE_CODE (arg1) == REAL_CST)
1853 return fold_convert_const_fixed_from_real (type, arg1);
1854 }
1855 return NULL_TREE;
1856 }
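
/* For illustration only (not part of the original file): the dispatch
   above is a pure table on the constant's code and the target type;
   e.g. fold_convert_const (FLOAT_EXPR, double_type_node, <INTEGER_CST 5>)
   reaches build_real_from_int_cst and yields <REAL_CST 5.0>, while any
   pairing not listed simply falls through and returns NULL_TREE.  */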
1857
1858 /* Construct a vector of zero elements of vector type TYPE. */
1859
1860 static tree
1861 build_zero_vector (tree type)
1862 {
1863 tree t;
1864
1865 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1866 return build_vector_from_val (type, t);
1867 }
1868
1869 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1870
1871 bool
1872 fold_convertible_p (const_tree type, const_tree arg)
1873 {
1874 tree orig = TREE_TYPE (arg);
1875
1876 if (type == orig)
1877 return true;
1878
1879 if (TREE_CODE (arg) == ERROR_MARK
1880 || TREE_CODE (type) == ERROR_MARK
1881 || TREE_CODE (orig) == ERROR_MARK)
1882 return false;
1883
1884 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1885 return true;
1886
1887 switch (TREE_CODE (type))
1888 {
1889 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1890 case POINTER_TYPE: case REFERENCE_TYPE:
1891 case OFFSET_TYPE:
1892 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1893 || TREE_CODE (orig) == OFFSET_TYPE)
1894 return true;
1895 return (TREE_CODE (orig) == VECTOR_TYPE
1896 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1897
1898 case REAL_TYPE:
1899 case FIXED_POINT_TYPE:
1900 case COMPLEX_TYPE:
1901 case VECTOR_TYPE:
1902 case VOID_TYPE:
1903 return TREE_CODE (type) == TREE_CODE (orig);
1904
1905 default:
1906 return false;
1907 }
1908 }
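
/* Illustrative examples (hypothetical, not in the original sources):
   fold_convertible_p returns true for int -> long (both INTEGRAL_TYPE_P)
   and for pointer -> unsigned int, but false for int -> a struct type,
   which lands in the default case above.  */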
1909
1910 /* Convert expression ARG to type TYPE. Used by the middle-end for
1911 simple conversions in preference to calling the front-end's convert. */
1912
1913 tree
1914 fold_convert_loc (location_t loc, tree type, tree arg)
1915 {
1916 tree orig = TREE_TYPE (arg);
1917 tree tem;
1918
1919 if (type == orig)
1920 return arg;
1921
1922 if (TREE_CODE (arg) == ERROR_MARK
1923 || TREE_CODE (type) == ERROR_MARK
1924 || TREE_CODE (orig) == ERROR_MARK)
1925 return error_mark_node;
1926
1927 switch (TREE_CODE (type))
1928 {
1929 case POINTER_TYPE:
1930 case REFERENCE_TYPE:
1931 /* Handle conversions between pointers to different address spaces. */
1932 if (POINTER_TYPE_P (orig)
1933 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1934 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1935 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1936 /* fall through */
1937
1938 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1939 case OFFSET_TYPE:
1940 if (TREE_CODE (arg) == INTEGER_CST)
1941 {
1942 tem = fold_convert_const (NOP_EXPR, type, arg);
1943 if (tem != NULL_TREE)
1944 return tem;
1945 }
1946 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1947 || TREE_CODE (orig) == OFFSET_TYPE)
1948 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1949 if (TREE_CODE (orig) == COMPLEX_TYPE)
1950 return fold_convert_loc (loc, type,
1951 fold_build1_loc (loc, REALPART_EXPR,
1952 TREE_TYPE (orig), arg));
1953 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1954 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1955 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1956
1957 case REAL_TYPE:
1958 if (TREE_CODE (arg) == INTEGER_CST)
1959 {
1960 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1961 if (tem != NULL_TREE)
1962 return tem;
1963 }
1964 else if (TREE_CODE (arg) == REAL_CST)
1965 {
1966 tem = fold_convert_const (NOP_EXPR, type, arg);
1967 if (tem != NULL_TREE)
1968 return tem;
1969 }
1970 else if (TREE_CODE (arg) == FIXED_CST)
1971 {
1972 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1973 if (tem != NULL_TREE)
1974 return tem;
1975 }
1976
1977 switch (TREE_CODE (orig))
1978 {
1979 case INTEGER_TYPE:
1980 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1981 case POINTER_TYPE: case REFERENCE_TYPE:
1982 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1983
1984 case REAL_TYPE:
1985 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1986
1987 case FIXED_POINT_TYPE:
1988 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1989
1990 case COMPLEX_TYPE:
1991 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1992 return fold_convert_loc (loc, type, tem);
1993
1994 default:
1995 gcc_unreachable ();
1996 }
1997
1998 case FIXED_POINT_TYPE:
1999 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2000 || TREE_CODE (arg) == REAL_CST)
2001 {
2002 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2003 if (tem != NULL_TREE)
2004 goto fold_convert_exit;
2005 }
2006
2007 switch (TREE_CODE (orig))
2008 {
2009 case FIXED_POINT_TYPE:
2010 case INTEGER_TYPE:
2011 case ENUMERAL_TYPE:
2012 case BOOLEAN_TYPE:
2013 case REAL_TYPE:
2014 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2015
2016 case COMPLEX_TYPE:
2017 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2018 return fold_convert_loc (loc, type, tem);
2019
2020 default:
2021 gcc_unreachable ();
2022 }
2023
2024 case COMPLEX_TYPE:
2025 switch (TREE_CODE (orig))
2026 {
2027 case INTEGER_TYPE:
2028 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2029 case POINTER_TYPE: case REFERENCE_TYPE:
2030 case REAL_TYPE:
2031 case FIXED_POINT_TYPE:
2032 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2033 fold_convert_loc (loc, TREE_TYPE (type), arg),
2034 fold_convert_loc (loc, TREE_TYPE (type),
2035 integer_zero_node));
2036 case COMPLEX_TYPE:
2037 {
2038 tree rpart, ipart;
2039
2040 if (TREE_CODE (arg) == COMPLEX_EXPR)
2041 {
2042 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2043 TREE_OPERAND (arg, 0));
2044 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2045 TREE_OPERAND (arg, 1));
2046 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2047 }
2048
2049 arg = save_expr (arg);
2050 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2051 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2052 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2053 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2054 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2055 }
2056
2057 default:
2058 gcc_unreachable ();
2059 }
2060
2061 case VECTOR_TYPE:
2062 if (integer_zerop (arg))
2063 return build_zero_vector (type);
2064 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2065 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2066 || TREE_CODE (orig) == VECTOR_TYPE);
2067 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2068
2069 case VOID_TYPE:
2070 tem = fold_ignored_result (arg);
2071 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2072
2073 default:
2074 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2075 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2076 gcc_unreachable ();
2077 }
2078 fold_convert_exit:
2079 protected_set_expr_location_unshare (tem, loc);
2080 return tem;
2081 }
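
/* A worked example (added for illustration): converting a COMPLEX_TYPE
   value C to float goes through the REAL_TYPE case above, which builds
   REALPART_EXPR <C> and converts that, so the call behaves like the C
   expression (float) creal (c) -- the imaginary part is dropped.  */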
2082 \f
2083 /* Return false if expr can be assumed not to be an lvalue, true
2084 otherwise. */
2085
2086 static bool
2087 maybe_lvalue_p (const_tree x)
2088 {
2089 /* We only need to wrap lvalue tree codes. */
2090 switch (TREE_CODE (x))
2091 {
2092 case VAR_DECL:
2093 case PARM_DECL:
2094 case RESULT_DECL:
2095 case LABEL_DECL:
2096 case FUNCTION_DECL:
2097 case SSA_NAME:
2098
2099 case COMPONENT_REF:
2100 case MEM_REF:
2101 case INDIRECT_REF:
2102 case ARRAY_REF:
2103 case ARRAY_RANGE_REF:
2104 case BIT_FIELD_REF:
2105 case OBJ_TYPE_REF:
2106
2107 case REALPART_EXPR:
2108 case IMAGPART_EXPR:
2109 case PREINCREMENT_EXPR:
2110 case PREDECREMENT_EXPR:
2111 case SAVE_EXPR:
2112 case TRY_CATCH_EXPR:
2113 case WITH_CLEANUP_EXPR:
2114 case COMPOUND_EXPR:
2115 case MODIFY_EXPR:
2116 case TARGET_EXPR:
2117 case COND_EXPR:
2118 case BIND_EXPR:
2119 break;
2120
2121 default:
2122 /* Assume the worst for front-end tree codes. */
2123 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2124 break;
2125 return false;
2126 }
2127
2128 return true;
2129 }
2130
2131 /* Return an expr equal to X but certainly not valid as an lvalue. */
2132
2133 tree
2134 non_lvalue_loc (location_t loc, tree x)
2135 {
2136 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2137 us. */
2138 if (in_gimple_form)
2139 return x;
2140
2141 if (! maybe_lvalue_p (x))
2142 return x;
2143 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2144 }
2145
2146 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2147 Zero means allow extended lvalues. */
2148
2149 int pedantic_lvalues;
2150
2151 /* When pedantic, return an expr equal to X but certainly not valid as a
2152 pedantic lvalue. Otherwise, return X. */
2153
2154 static tree
2155 pedantic_non_lvalue_loc (location_t loc, tree x)
2156 {
2157 if (pedantic_lvalues)
2158 return non_lvalue_loc (loc, x);
2159
2160 return protected_set_expr_location_unshare (x, loc);
2161 }
2162 \f
2163 /* Given a tree comparison code, return the code that is the logical inverse.
2164 It is generally not safe to do this for floating-point comparisons, except
2165 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2166 ERROR_MARK in this case. */
2167
2168 enum tree_code
2169 invert_tree_comparison (enum tree_code code, bool honor_nans)
2170 {
2171 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2172 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2173 return ERROR_MARK;
2174
2175 switch (code)
2176 {
2177 case EQ_EXPR:
2178 return NE_EXPR;
2179 case NE_EXPR:
2180 return EQ_EXPR;
2181 case GT_EXPR:
2182 return honor_nans ? UNLE_EXPR : LE_EXPR;
2183 case GE_EXPR:
2184 return honor_nans ? UNLT_EXPR : LT_EXPR;
2185 case LT_EXPR:
2186 return honor_nans ? UNGE_EXPR : GE_EXPR;
2187 case LE_EXPR:
2188 return honor_nans ? UNGT_EXPR : GT_EXPR;
2189 case LTGT_EXPR:
2190 return UNEQ_EXPR;
2191 case UNEQ_EXPR:
2192 return LTGT_EXPR;
2193 case UNGT_EXPR:
2194 return LE_EXPR;
2195 case UNGE_EXPR:
2196 return LT_EXPR;
2197 case UNLT_EXPR:
2198 return GE_EXPR;
2199 case UNLE_EXPR:
2200 return GT_EXPR;
2201 case ORDERED_EXPR:
2202 return UNORDERED_EXPR;
2203 case UNORDERED_EXPR:
2204 return ORDERED_EXPR;
2205 default:
2206 gcc_unreachable ();
2207 }
2208 }
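
/* For illustration: with NaNs honored, inverting x < y yields
   x UNGE y (true when x >= y or either operand is NaN), which is the
   exact logical negation under IEEE semantics; without NaNs the plain
   x >= y is produced, and when both NaNs and trapping math are in
   effect most inversions are refused (ERROR_MARK) because the inverse
   would trap on different inputs.  */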
2209
2210 /* Similar, but return the comparison that results if the operands are
2211 swapped. This is safe for floating-point. */
2212
2213 enum tree_code
2214 swap_tree_comparison (enum tree_code code)
2215 {
2216 switch (code)
2217 {
2218 case EQ_EXPR:
2219 case NE_EXPR:
2220 case ORDERED_EXPR:
2221 case UNORDERED_EXPR:
2222 case LTGT_EXPR:
2223 case UNEQ_EXPR:
2224 return code;
2225 case GT_EXPR:
2226 return LT_EXPR;
2227 case GE_EXPR:
2228 return LE_EXPR;
2229 case LT_EXPR:
2230 return GT_EXPR;
2231 case LE_EXPR:
2232 return GE_EXPR;
2233 case UNGT_EXPR:
2234 return UNLT_EXPR;
2235 case UNGE_EXPR:
2236 return UNLE_EXPR;
2237 case UNLT_EXPR:
2238 return UNGT_EXPR;
2239 case UNLE_EXPR:
2240 return UNGE_EXPR;
2241 default:
2242 gcc_unreachable ();
2243 }
2244 }
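
/* For illustration: swapping rewrites x < y as y > x, so the symmetric
   codes (EQ, NE, ORDERED, UNORDERED, LTGT, UNEQ) map to themselves and
   the ordered and unordered flavors never mix.  */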
2245
2246
2247 /* Convert a comparison tree code from an enum tree_code representation
2248 into a compcode bit-based encoding. This function is the inverse of
2249 compcode_to_comparison. */
2250
2251 static enum comparison_code
2252 comparison_to_compcode (enum tree_code code)
2253 {
2254 switch (code)
2255 {
2256 case LT_EXPR:
2257 return COMPCODE_LT;
2258 case EQ_EXPR:
2259 return COMPCODE_EQ;
2260 case LE_EXPR:
2261 return COMPCODE_LE;
2262 case GT_EXPR:
2263 return COMPCODE_GT;
2264 case NE_EXPR:
2265 return COMPCODE_NE;
2266 case GE_EXPR:
2267 return COMPCODE_GE;
2268 case ORDERED_EXPR:
2269 return COMPCODE_ORD;
2270 case UNORDERED_EXPR:
2271 return COMPCODE_UNORD;
2272 case UNLT_EXPR:
2273 return COMPCODE_UNLT;
2274 case UNEQ_EXPR:
2275 return COMPCODE_UNEQ;
2276 case UNLE_EXPR:
2277 return COMPCODE_UNLE;
2278 case UNGT_EXPR:
2279 return COMPCODE_UNGT;
2280 case LTGT_EXPR:
2281 return COMPCODE_LTGT;
2282 case UNGE_EXPR:
2283 return COMPCODE_UNGE;
2284 default:
2285 gcc_unreachable ();
2286 }
2287 }
2288
2289 /* Convert a compcode bit-based encoding of a comparison operator back
2290 to GCC's enum tree_code representation. This function is the
2291 inverse of comparison_to_compcode. */
2292
2293 static enum tree_code
2294 compcode_to_comparison (enum comparison_code code)
2295 {
2296 switch (code)
2297 {
2298 case COMPCODE_LT:
2299 return LT_EXPR;
2300 case COMPCODE_EQ:
2301 return EQ_EXPR;
2302 case COMPCODE_LE:
2303 return LE_EXPR;
2304 case COMPCODE_GT:
2305 return GT_EXPR;
2306 case COMPCODE_NE:
2307 return NE_EXPR;
2308 case COMPCODE_GE:
2309 return GE_EXPR;
2310 case COMPCODE_ORD:
2311 return ORDERED_EXPR;
2312 case COMPCODE_UNORD:
2313 return UNORDERED_EXPR;
2314 case COMPCODE_UNLT:
2315 return UNLT_EXPR;
2316 case COMPCODE_UNEQ:
2317 return UNEQ_EXPR;
2318 case COMPCODE_UNLE:
2319 return UNLE_EXPR;
2320 case COMPCODE_UNGT:
2321 return UNGT_EXPR;
2322 case COMPCODE_LTGT:
2323 return LTGT_EXPR;
2324 case COMPCODE_UNGE:
2325 return UNGE_EXPR;
2326 default:
2327 gcc_unreachable ();
2328 }
2329 }
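
/* Illustrative note (not from the original sources): the encoding gives
   the LT, EQ, GT and UNORD outcomes one bit each, so compound codes are
   unions of bits -- COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) == 3 and
   COMPCODE_UNGE == (COMPCODE_UNORD | COMPCODE_GT | COMPCODE_EQ) == 14.
   This is what lets combine_comparisons below compose two predicates
   with plain & and |.  */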
2330
2331 /* Return a tree for the comparison which is the combination of
2332 doing the AND or OR (depending on CODE) of the two operations LCODE
2333 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2334 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2335 if this makes the transformation invalid. */
2336
2337 tree
2338 combine_comparisons (location_t loc,
2339 enum tree_code code, enum tree_code lcode,
2340 enum tree_code rcode, tree truth_type,
2341 tree ll_arg, tree lr_arg)
2342 {
2343 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2344 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2345 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2346 int compcode;
2347
2348 switch (code)
2349 {
2350 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2351 compcode = lcompcode & rcompcode;
2352 break;
2353
2354 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2355 compcode = lcompcode | rcompcode;
2356 break;
2357
2358 default:
2359 return NULL_TREE;
2360 }
2361
2362 if (!honor_nans)
2363 {
2364 /* Eliminate unordered comparisons, as well as LTGT and ORD
2365 which are not used unless the mode has NaNs. */
2366 compcode &= ~COMPCODE_UNORD;
2367 if (compcode == COMPCODE_LTGT)
2368 compcode = COMPCODE_NE;
2369 else if (compcode == COMPCODE_ORD)
2370 compcode = COMPCODE_TRUE;
2371 }
2372 else if (flag_trapping_math)
2373 {
2374 /* Check that the original operation and the optimized ones will trap
2375 under the same condition. */
2376 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2377 && (lcompcode != COMPCODE_EQ)
2378 && (lcompcode != COMPCODE_ORD);
2379 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2380 && (rcompcode != COMPCODE_EQ)
2381 && (rcompcode != COMPCODE_ORD);
2382 bool trap = (compcode & COMPCODE_UNORD) == 0
2383 && (compcode != COMPCODE_EQ)
2384 && (compcode != COMPCODE_ORD);
2385
2386 /* In a short-circuited boolean expression the LHS might be
2387 such that the RHS, if evaluated, will never trap. For
2388 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2389 if neither x nor y is NaN. (This is a mixed blessing: for
2390 example, the expression above will never trap, hence
2391 optimizing it to x < y would be invalid). */
2392 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2393 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2394 rtrap = false;
2395
2396 /* If the comparison was short-circuited, and only the RHS
2397 trapped, we may now generate a spurious trap. */
2398 if (rtrap && !ltrap
2399 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2400 return NULL_TREE;
2401
2402 /* If we changed the conditions that cause a trap, we lose. */
2403 if ((ltrap || rtrap) != trap)
2404 return NULL_TREE;
2405 }
2406
2407 if (compcode == COMPCODE_TRUE)
2408 return constant_boolean_node (true, truth_type);
2409 else if (compcode == COMPCODE_FALSE)
2410 return constant_boolean_node (false, truth_type);
2411 else
2412 {
2413 enum tree_code tcode;
2414
2415 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2416 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2417 }
2418 }
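
/* A worked example (added for illustration), assuming NaNs need not be
   honored or -fno-trapping-math is in effect: for
   (x <= y) && (x >= y) we get lcompcode = 3 (LE), rcompcode = 6 (GE),
   and 3 & 6 == 2 == COMPCODE_EQ, so the call returns x == y; likewise
   (x < y) || (x == y) gives 1 | 2 == 3 and folds to x <= y.  */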
2419 \f
2420 /* Return nonzero if two operands (typically of the same tree node)
2421 are necessarily equal. If either argument has side-effects this
2422 function returns zero. FLAGS modifies behavior as follows:
2423
2424 If OEP_ONLY_CONST is set, only return nonzero for constants.
2425 This function tests whether the operands are indistinguishable;
2426 it does not test whether they are equal using C's == operation.
2427 The distinction is important for IEEE floating point, because
2428 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2429 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2430
2431 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2432 even though it may hold multiple values during a function.
2433 This is because a GCC tree node guarantees that nothing else is
2434 executed between the evaluation of its "operands" (which may often
2435 be evaluated in arbitrary order). Hence if the operands themselves
2436 have no side effects, the VAR_DECLs, PARM_DECLs, etc. must hold the
2437 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2438 unset means assuming isochronic (or instantaneous) tree equivalence.
2439 Unless comparing arbitrary expression trees, such as from different
2440 statements, this flag can usually be left unset.
2441
2442 If OEP_PURE_SAME is set, then pure functions with identical arguments
2443 are considered the same. It is used when the caller has other ways
2444 to ensure that global memory is unchanged in between. */
2445
2446 int
2447 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2448 {
2449 /* If either is ERROR_MARK, they aren't equal. */
2450 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2451 || TREE_TYPE (arg0) == error_mark_node
2452 || TREE_TYPE (arg1) == error_mark_node)
2453 return 0;
2454
2455 /* Similarly, if either does not have a type (like a released SSA name),
2456 they aren't equal. */
2457 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2458 return 0;
2459
2460 /* Check equality of integer constants before bailing out due to
2461 precision differences. */
2462 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2463 return tree_int_cst_equal (arg0, arg1);
2464
2465 /* If both types don't have the same signedness, then we can't consider
2466 them equal. We must check this before the STRIP_NOPS calls
2467 because they may change the signedness of the arguments. As pointers
2468 strictly don't have a signedness, require either two pointers or
2469 two non-pointers as well. */
2470 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2471 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2472 return 0;
2473
2474 /* We cannot consider pointers to different address space equal. */
2475 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2476 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2477 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2478 return 0;
2479
2480 /* If both types don't have the same precision, then it is not safe
2481 to strip NOPs. */
2482 if (element_precision (TREE_TYPE (arg0))
2483 != element_precision (TREE_TYPE (arg1)))
2484 return 0;
2485
2486 STRIP_NOPS (arg0);
2487 STRIP_NOPS (arg1);
2488
2489 /* In case both args are comparisons but with different comparison
2490 code, try to swap the comparison operands of one arg to produce
2491 a match and compare that variant. */
2492 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2493 && COMPARISON_CLASS_P (arg0)
2494 && COMPARISON_CLASS_P (arg1))
2495 {
2496 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2497
2498 if (TREE_CODE (arg0) == swap_code)
2499 return operand_equal_p (TREE_OPERAND (arg0, 0),
2500 TREE_OPERAND (arg1, 1), flags)
2501 && operand_equal_p (TREE_OPERAND (arg0, 1),
2502 TREE_OPERAND (arg1, 0), flags);
2503 }
2504
2505 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2506 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2507 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2508 return 0;
2509
2510 /* This is needed for conversions and for COMPONENT_REF.
2511 Might as well play it safe and always test this. */
2512 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2513 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2514 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2515 return 0;
2516
2517 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2518 We don't care about side effects in that case because the SAVE_EXPR
2519 takes care of that for us. In all other cases, two expressions are
2520 equal if they have no side effects. If we have two identical
2521 expressions with side effects that should be treated the same due
2522 to the only side effects being identical SAVE_EXPR's, that will
2523 be detected in the recursive calls below.
2524 If we are taking an invariant address of two identical objects
2525 they are necessarily equal as well. */
2526 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2527 && (TREE_CODE (arg0) == SAVE_EXPR
2528 || (flags & OEP_CONSTANT_ADDRESS_OF)
2529 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2530 return 1;
2531
2532 /* Next handle constant cases, those for which we can return 1 even
2533 if ONLY_CONST is set. */
2534 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2535 switch (TREE_CODE (arg0))
2536 {
2537 case INTEGER_CST:
2538 return tree_int_cst_equal (arg0, arg1);
2539
2540 case FIXED_CST:
2541 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2542 TREE_FIXED_CST (arg1));
2543
2544 case REAL_CST:
2545 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2546 TREE_REAL_CST (arg1)))
2547 return 1;
2548
2549
2550 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2551 {
2552 /* If we do not distinguish between signed and unsigned zero,
2553 consider them equal. */
2554 if (real_zerop (arg0) && real_zerop (arg1))
2555 return 1;
2556 }
2557 return 0;
2558
2559 case VECTOR_CST:
2560 {
2561 unsigned i;
2562
2563 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2564 return 0;
2565
2566 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2567 {
2568 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2569 VECTOR_CST_ELT (arg1, i), flags))
2570 return 0;
2571 }
2572 return 1;
2573 }
2574
2575 case COMPLEX_CST:
2576 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2577 flags)
2578 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2579 flags));
2580
2581 case STRING_CST:
2582 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2583 && ! memcmp (TREE_STRING_POINTER (arg0),
2584 TREE_STRING_POINTER (arg1),
2585 TREE_STRING_LENGTH (arg0)));
2586
2587 case ADDR_EXPR:
2588 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2589 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2590 ? OEP_CONSTANT_ADDRESS_OF : 0);
2591 default:
2592 break;
2593 }
2594
2595 if (flags & OEP_ONLY_CONST)
2596 return 0;
2597
2598 /* Define macros to test an operand from arg0 and arg1 for equality and a
2599 variant that allows null and views null as being different from any
2600 non-null value. In the latter case, if either is null, then both
2601 must be; otherwise, do the normal comparison. */
2602 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2603 TREE_OPERAND (arg1, N), flags)
2604
2605 #define OP_SAME_WITH_NULL(N) \
2606 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2607 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2608
2609 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2610 {
2611 case tcc_unary:
2612 /* Two conversions are equal only if signedness and modes match. */
2613 switch (TREE_CODE (arg0))
2614 {
2615 CASE_CONVERT:
2616 case FIX_TRUNC_EXPR:
2617 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2618 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2619 return 0;
2620 break;
2621 default:
2622 break;
2623 }
2624
2625 return OP_SAME (0);
2626
2627
2628 case tcc_comparison:
2629 case tcc_binary:
2630 if (OP_SAME (0) && OP_SAME (1))
2631 return 1;
2632
2633 /* For commutative ops, allow the other order. */
2634 return (commutative_tree_code (TREE_CODE (arg0))
2635 && operand_equal_p (TREE_OPERAND (arg0, 0),
2636 TREE_OPERAND (arg1, 1), flags)
2637 && operand_equal_p (TREE_OPERAND (arg0, 1),
2638 TREE_OPERAND (arg1, 0), flags));
2639
2640 case tcc_reference:
2641 /* If either of the pointer (or reference) expressions we are
2642 dereferencing contain a side effect, these cannot be equal,
2643 but their addresses can be. */
2644 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2645 && (TREE_SIDE_EFFECTS (arg0)
2646 || TREE_SIDE_EFFECTS (arg1)))
2647 return 0;
2648
2649 switch (TREE_CODE (arg0))
2650 {
2651 case INDIRECT_REF:
2652 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2653 return OP_SAME (0);
2654
2655 case REALPART_EXPR:
2656 case IMAGPART_EXPR:
2657 return OP_SAME (0);
2658
2659 case TARGET_MEM_REF:
2660 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2661 /* Require equal extra operands and then fall through to MEM_REF
2662 handling of the two common operands. */
2663 if (!OP_SAME_WITH_NULL (2)
2664 || !OP_SAME_WITH_NULL (3)
2665 || !OP_SAME_WITH_NULL (4))
2666 return 0;
2667 /* Fallthru. */
2668 case MEM_REF:
2669 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2670 /* Require equal access sizes, and similar pointer types.
2671 We can have incomplete types for array references of
2672 variable-sized arrays from the Fortran frontend
2673 though. Also verify the types are compatible. */
2674 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2675 || (TYPE_SIZE (TREE_TYPE (arg0))
2676 && TYPE_SIZE (TREE_TYPE (arg1))
2677 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2678 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2679 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2680 && alias_ptr_types_compatible_p
2681 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2682 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2683 && OP_SAME (0) && OP_SAME (1));
2684
2685 case ARRAY_REF:
2686 case ARRAY_RANGE_REF:
2687 /* Operands 2 and 3 may be null.
2688 Compare the array index by value first if it is constant, as we
2689 may have different types but the same value here. */
2690 if (!OP_SAME (0))
2691 return 0;
2692 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2693 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2694 TREE_OPERAND (arg1, 1))
2695 || OP_SAME (1))
2696 && OP_SAME_WITH_NULL (2)
2697 && OP_SAME_WITH_NULL (3));
2698
2699 case COMPONENT_REF:
2700 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2701 may be NULL when we're called to compare MEM_EXPRs. */
2702 if (!OP_SAME_WITH_NULL (0)
2703 || !OP_SAME (1))
2704 return 0;
2705 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2706 return OP_SAME_WITH_NULL (2);
2707
2708 case BIT_FIELD_REF:
2709 if (!OP_SAME (0))
2710 return 0;
2711 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2712 return OP_SAME (1) && OP_SAME (2);
2713
2714 default:
2715 return 0;
2716 }
2717
2718 case tcc_expression:
2719 switch (TREE_CODE (arg0))
2720 {
2721 case ADDR_EXPR:
2722 case TRUTH_NOT_EXPR:
2723 return OP_SAME (0);
2724
2725 case TRUTH_ANDIF_EXPR:
2726 case TRUTH_ORIF_EXPR:
2727 return OP_SAME (0) && OP_SAME (1);
2728
2729 case FMA_EXPR:
2730 case WIDEN_MULT_PLUS_EXPR:
2731 case WIDEN_MULT_MINUS_EXPR:
2732 if (!OP_SAME (2))
2733 return 0;
2734 /* The multiplication operands are commutative. */
2735 /* FALLTHRU */
2736
2737 case TRUTH_AND_EXPR:
2738 case TRUTH_OR_EXPR:
2739 case TRUTH_XOR_EXPR:
2740 if (OP_SAME (0) && OP_SAME (1))
2741 return 1;
2742
2743 /* Otherwise take into account this is a commutative operation. */
2744 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2745 TREE_OPERAND (arg1, 1), flags)
2746 && operand_equal_p (TREE_OPERAND (arg0, 1),
2747 TREE_OPERAND (arg1, 0), flags));
2748
2749 case COND_EXPR:
2750 case VEC_COND_EXPR:
2751 case DOT_PROD_EXPR:
2752 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2753
2754 default:
2755 return 0;
2756 }
2757
2758 case tcc_vl_exp:
2759 switch (TREE_CODE (arg0))
2760 {
2761 case CALL_EXPR:
2762 /* If the CALL_EXPRs call different functions, then they
2763 clearly can not be equal. */
2764 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2765 flags))
2766 return 0;
2767
2768 {
2769 unsigned int cef = call_expr_flags (arg0);
2770 if (flags & OEP_PURE_SAME)
2771 cef &= ECF_CONST | ECF_PURE;
2772 else
2773 cef &= ECF_CONST;
2774 if (!cef)
2775 return 0;
2776 }
2777
2778 /* Now see if all the arguments are the same. */
2779 {
2780 const_call_expr_arg_iterator iter0, iter1;
2781 const_tree a0, a1;
2782 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2783 a1 = first_const_call_expr_arg (arg1, &iter1);
2784 a0 && a1;
2785 a0 = next_const_call_expr_arg (&iter0),
2786 a1 = next_const_call_expr_arg (&iter1))
2787 if (! operand_equal_p (a0, a1, flags))
2788 return 0;
2789
2790 /* If we get here and both argument lists are exhausted
2791 then the CALL_EXPRs are equal. */
2792 return ! (a0 || a1);
2793 }
2794 default:
2795 return 0;
2796 }
2797
2798 case tcc_declaration:
2799 /* Consider __builtin_sqrt equal to sqrt. */
2800 return (TREE_CODE (arg0) == FUNCTION_DECL
2801 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2802 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2803 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2804
2805 default:
2806 return 0;
2807 }
2808
2809 #undef OP_SAME
2810 #undef OP_SAME_WITH_NULL
2811 }
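
/* Illustrative examples (hypothetical, not from the original file):
   A + B and B + A compare equal through the commutative tcc_binary
   case; REAL_CSTs for -0.0 and 0.0 compare unequal whenever signed
   zeros are honored for the mode; and two calls f () are never equal
   unless f is const, or pure with OEP_PURE_SAME given.  */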
2812 \f
2813 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2814 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2815
2816 When in doubt, return 0. */
2817
2818 static int
2819 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2820 {
2821 int unsignedp1, unsignedpo;
2822 tree primarg0, primarg1, primother;
2823 unsigned int correct_width;
2824
2825 if (operand_equal_p (arg0, arg1, 0))
2826 return 1;
2827
2828 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2829 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2830 return 0;
2831
2832 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2833 and see if the inner values are the same. This removes any
2834 signedness comparison, which doesn't matter here. */
2835 primarg0 = arg0, primarg1 = arg1;
2836 STRIP_NOPS (primarg0);
2837 STRIP_NOPS (primarg1);
2838 if (operand_equal_p (primarg0, primarg1, 0))
2839 return 1;
2840
2841 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2842 actual comparison operand, ARG0.
2843
2844 First throw away any conversions to wider types
2845 already present in the operands. */
2846
2847 primarg1 = get_narrower (arg1, &unsignedp1);
2848 primother = get_narrower (other, &unsignedpo);
2849
2850 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2851 if (unsignedp1 == unsignedpo
2852 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2853 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2854 {
2855 tree type = TREE_TYPE (arg0);
2856
2857 /* Make sure shorter operand is extended the right way
2858 to match the longer operand. */
2859 primarg1 = fold_convert (signed_or_unsigned_type_for
2860 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2861
2862 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2863 return 1;
2864 }
2865
2866 return 0;
2867 }
2868 \f
2869 /* See if ARG is an expression that is either a comparison or is performing
2870 arithmetic on comparisons. The comparisons must only be comparing
2871 two different values, which will be stored in *CVAL1 and *CVAL2; if
2872 they are nonzero it means that some operands have already been found.
2873 No variables may be used anywhere else in the expression except in the
2874 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2875 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2876
2877 If this is true, return 1. Otherwise, return zero. */
2878
2879 static int
2880 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2881 {
2882 enum tree_code code = TREE_CODE (arg);
2883 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2884
2885 /* We can handle some of the tcc_expression cases here. */
2886 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2887 tclass = tcc_unary;
2888 else if (tclass == tcc_expression
2889 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2890 || code == COMPOUND_EXPR))
2891 tclass = tcc_binary;
2892
2893 else if (tclass == tcc_expression && code == SAVE_EXPR
2894 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2895 {
2896 /* If we've already found a CVAL1 or CVAL2, this expression is
2897 too complex to handle. */
2898 if (*cval1 || *cval2)
2899 return 0;
2900
2901 tclass = tcc_unary;
2902 *save_p = 1;
2903 }
2904
2905 switch (tclass)
2906 {
2907 case tcc_unary:
2908 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2909
2910 case tcc_binary:
2911 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2912 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2913 cval1, cval2, save_p));
2914
2915 case tcc_constant:
2916 return 1;
2917
2918 case tcc_expression:
2919 if (code == COND_EXPR)
2920 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2921 cval1, cval2, save_p)
2922 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2923 cval1, cval2, save_p)
2924 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2925 cval1, cval2, save_p));
2926 return 0;
2927
2928 case tcc_comparison:
2929 /* First see if we can handle the first operand, then the second. For
2930 the second operand, we know *CVAL1 can't be zero. It must be that
2931 one side of the comparison is each of the values; test for the
2932 case where this isn't true by failing if the two operands
2933 are the same. */
2934
2935 if (operand_equal_p (TREE_OPERAND (arg, 0),
2936 TREE_OPERAND (arg, 1), 0))
2937 return 0;
2938
2939 if (*cval1 == 0)
2940 *cval1 = TREE_OPERAND (arg, 0);
2941 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2942 ;
2943 else if (*cval2 == 0)
2944 *cval2 = TREE_OPERAND (arg, 0);
2945 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2946 ;
2947 else
2948 return 0;
2949
2950 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2951 ;
2952 else if (*cval2 == 0)
2953 *cval2 = TREE_OPERAND (arg, 1);
2954 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2955 ;
2956 else
2957 return 0;
2958
2959 return 1;
2960
2961 default:
2962 return 0;
2963 }
2964 }
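
/* A worked example (for illustration): in a == b || a < b the
   comparisons mention only two values, so A lands in *CVAL1, B in
   *CVAL2 and the function returns 1; a third value, as in
   a == b || a < c, makes it return 0.  */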
2965 \f
2966 /* ARG is a tree that is known to contain just arithmetic operations and
2967 comparisons. Evaluate the operations in the tree substituting NEW0 for
2968 any occurrence of OLD0 as an operand of a comparison and likewise for
2969 NEW1 and OLD1. */
2970
2971 static tree
2972 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2973 tree old1, tree new1)
2974 {
2975 tree type = TREE_TYPE (arg);
2976 enum tree_code code = TREE_CODE (arg);
2977 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2978
2979 /* We can handle some of the tcc_expression cases here. */
2980 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2981 tclass = tcc_unary;
2982 else if (tclass == tcc_expression
2983 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2984 tclass = tcc_binary;
2985
2986 switch (tclass)
2987 {
2988 case tcc_unary:
2989 return fold_build1_loc (loc, code, type,
2990 eval_subst (loc, TREE_OPERAND (arg, 0),
2991 old0, new0, old1, new1));
2992
2993 case tcc_binary:
2994 return fold_build2_loc (loc, code, type,
2995 eval_subst (loc, TREE_OPERAND (arg, 0),
2996 old0, new0, old1, new1),
2997 eval_subst (loc, TREE_OPERAND (arg, 1),
2998 old0, new0, old1, new1));
2999
3000 case tcc_expression:
3001 switch (code)
3002 {
3003 case SAVE_EXPR:
3004 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3005 old1, new1);
3006
3007 case COMPOUND_EXPR:
3008 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3009 old1, new1);
3010
3011 case COND_EXPR:
3012 return fold_build3_loc (loc, code, type,
3013 eval_subst (loc, TREE_OPERAND (arg, 0),
3014 old0, new0, old1, new1),
3015 eval_subst (loc, TREE_OPERAND (arg, 1),
3016 old0, new0, old1, new1),
3017 eval_subst (loc, TREE_OPERAND (arg, 2),
3018 old0, new0, old1, new1));
3019 default:
3020 break;
3021 }
3022 /* Fall through - ??? */
3023
3024 case tcc_comparison:
3025 {
3026 tree arg0 = TREE_OPERAND (arg, 0);
3027 tree arg1 = TREE_OPERAND (arg, 1);
3028
3029 /* We need to check both for exact equality and tree equality. The
3030 former will be true if the operand has a side-effect. In that
3031 case, we know the operand occurred exactly once. */
3032
3033 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3034 arg0 = new0;
3035 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3036 arg0 = new1;
3037
3038 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3039 arg1 = new0;
3040 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3041 arg1 = new1;
3042
3043 return fold_build2_loc (loc, code, type, arg0, arg1);
3044 }
3045
3046 default:
3047 return arg;
3048 }
3049 }
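
/* For illustration: eval_subst (loc, a < b && b < c, a, 0, b, 1)
   rebuilds the tree as 0 < 1 && 1 < c, and because the rebuild goes
   through fold_build2_loc the constant subcomparison collapses on the
   spot.  */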
3050 \f
3051 /* Return a tree for the case when the result of an expression is RESULT
3052 converted to TYPE and OMITTED was previously an operand of the expression
3053 but is now not needed (e.g., we folded OMITTED * 0).
3054
3055 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3056 the conversion of RESULT to TYPE. */
3057
3058 tree
3059 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3060 {
3061 tree t = fold_convert_loc (loc, type, result);
3062
3063 /* If the resulting operand is an empty statement, just return the omitted
3064 statement cast to void. */
3065 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3066 return build1_loc (loc, NOP_EXPR, void_type_node,
3067 fold_ignored_result (omitted));
3068
3069 if (TREE_SIDE_EFFECTS (omitted))
3070 return build2_loc (loc, COMPOUND_EXPR, type,
3071 fold_ignored_result (omitted), t);
3072
3073 return non_lvalue_loc (loc, t);
3074 }
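
/* A worked example (added for illustration): when f () * 0 is folded
   to 0 and the call has side effects,
   omit_one_operand_loc (loc, type, integer_zero_node, call) produces
   the COMPOUND_EXPR (f (), 0) rather than discarding the call.  */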
3075
3076 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3077
3078 static tree
3079 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3080 tree omitted)
3081 {
3082 tree t = fold_convert_loc (loc, type, result);
3083
3084 /* If the resulting operand is an empty statement, just return the omitted
3085 statement casted to void. */
3086 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3087 return build1_loc (loc, NOP_EXPR, void_type_node,
3088 fold_ignored_result (omitted));
3089
3090 if (TREE_SIDE_EFFECTS (omitted))
3091 return build2_loc (loc, COMPOUND_EXPR, type,
3092 fold_ignored_result (omitted), t);
3093
3094 return pedantic_non_lvalue_loc (loc, t);
3095 }
3096
3097 /* Return a tree for the case when the result of an expression is RESULT
3098 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3099 of the expression but are now not needed.
3100
3101 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3102 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3103 evaluated before OMITTED2. Otherwise, if neither has side effects,
3104 just do the conversion of RESULT to TYPE. */
3105
3106 tree
3107 omit_two_operands_loc (location_t loc, tree type, tree result,
3108 tree omitted1, tree omitted2)
3109 {
3110 tree t = fold_convert_loc (loc, type, result);
3111
3112 if (TREE_SIDE_EFFECTS (omitted2))
3113 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3114 if (TREE_SIDE_EFFECTS (omitted1))
3115 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3116
3117 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3118 }
3119
3120 \f
3121 /* Return a simplified tree node for the truth-negation of ARG. This
3122 never alters ARG itself. We assume that ARG is an operation that
3123 returns a truth value (0 or 1).
3124
3125 FIXME: one would think we would fold the result, but it causes
3126 problems with the dominator optimizer. */
3127
3128 static tree
3129 fold_truth_not_expr (location_t loc, tree arg)
3130 {
3131 tree type = TREE_TYPE (arg);
3132 enum tree_code code = TREE_CODE (arg);
3133 location_t loc1, loc2;
3134
3135 /* If this is a comparison, we can simply invert it, except for
3136 floating-point non-equality comparisons, in which case we just
3137 enclose a TRUTH_NOT_EXPR around what we have. */
3138
3139 if (TREE_CODE_CLASS (code) == tcc_comparison)
3140 {
3141 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3142 if (FLOAT_TYPE_P (op_type)
3143 && flag_trapping_math
3144 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3145 && code != NE_EXPR && code != EQ_EXPR)
3146 return NULL_TREE;
3147
3148 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3149 if (code == ERROR_MARK)
3150 return NULL_TREE;
3151
3152 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3153 TREE_OPERAND (arg, 1));
3154 }
3155
3156 switch (code)
3157 {
3158 case INTEGER_CST:
3159 return constant_boolean_node (integer_zerop (arg), type);
3160
3161 case TRUTH_AND_EXPR:
3162 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3163 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3164 return build2_loc (loc, TRUTH_OR_EXPR, type,
3165 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3166 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3167
3168 case TRUTH_OR_EXPR:
3169 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3170 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3171 return build2_loc (loc, TRUTH_AND_EXPR, type,
3172 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3173 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3174
3175 case TRUTH_XOR_EXPR:
3176 /* Here we can invert either operand. We invert the first operand
3177 unless the second operand is a TRUTH_NOT_EXPR in which case our
3178 result is the XOR of the first operand with the inside of the
3179 negation of the second operand. */
3180
3181 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3182 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3183 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3184 else
3185 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3186 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3187 TREE_OPERAND (arg, 1));
3188
3189 case TRUTH_ANDIF_EXPR:
3190 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3191 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3192 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3193 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3194 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3195
3196 case TRUTH_ORIF_EXPR:
3197 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3198 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3199 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3200 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3201 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3202
3203 case TRUTH_NOT_EXPR:
3204 return TREE_OPERAND (arg, 0);
3205
3206 case COND_EXPR:
3207 {
3208 tree arg1 = TREE_OPERAND (arg, 1);
3209 tree arg2 = TREE_OPERAND (arg, 2);
3210
3211 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3212 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3213
3214 /* A COND_EXPR may have a throw as one operand, which
3215 then has void type. Just leave void operands
3216 as they are. */
3217 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3218 VOID_TYPE_P (TREE_TYPE (arg1))
3219 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3220 VOID_TYPE_P (TREE_TYPE (arg2))
3221 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3222 }
3223
3224 case COMPOUND_EXPR:
3225 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3226 return build2_loc (loc, COMPOUND_EXPR, type,
3227 TREE_OPERAND (arg, 0),
3228 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3229
3230 case NON_LVALUE_EXPR:
3231 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3232 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3233
3234 CASE_CONVERT:
3235 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3236 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3237
3238 /* ... fall through ... */
3239
3240 case FLOAT_EXPR:
3241 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3242 return build1_loc (loc, TREE_CODE (arg), type,
3243 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3244
3245 case BIT_AND_EXPR:
3246 if (!integer_onep (TREE_OPERAND (arg, 1)))
3247 return NULL_TREE;
3248 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3249
3250 case SAVE_EXPR:
3251 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3252
3253 case CLEANUP_POINT_EXPR:
3254 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3255 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3256 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3257
3258 default:
3259 return NULL_TREE;
3260 }
3261 }
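
/* Illustrative examples (not from the original sources): the TRUTH_*
   cases above are De Morgan's laws, so !(a && b) becomes !a || !b and
   !(a || b) becomes !a && !b; a floating-point a < b under
   -ftrapping-math is instead left alone (NULL_TREE) so that its
   trapping behavior is not changed.  */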
3262
3263 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3264 assume that ARG is an operation that returns a truth value (0 or 1
3265 for scalars, 0 or -1 for vectors). Return the folded expression if
3266 folding is successful. Otherwise, return NULL_TREE. */
3267
3268 static tree
3269 fold_invert_truthvalue (location_t loc, tree arg)
3270 {
3271 tree type = TREE_TYPE (arg);
3272 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3273 ? BIT_NOT_EXPR
3274 : TRUTH_NOT_EXPR,
3275 type, arg);
3276 }
3277
3278 /* Return a simplified tree node for the truth-negation of ARG. This
3279 never alters ARG itself. We assume that ARG is an operation that
3280 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3281
3282 tree
3283 invert_truthvalue_loc (location_t loc, tree arg)
3284 {
3285 if (TREE_CODE (arg) == ERROR_MARK)
3286 return arg;
3287
3288 tree type = TREE_TYPE (arg);
3289 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3290 ? BIT_NOT_EXPR
3291 : TRUTH_NOT_EXPR,
3292 type, arg);
3293 }
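
/* For illustration: vector truth values use 0 and -1, so negating them
   is a bitwise operation and the function builds BIT_NOT_EXPR; scalar
   0/1 truth values get TRUTH_NOT_EXPR instead.  */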
3294
3295 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3296 operands are another bit-wise operation with a common input. If so,
3297 distribute the bit operations to save an operation and possibly two if
3298 constants are involved. For example, convert
3299 (A | B) & (A | C) into A | (B & C)
3300 Further simplification will occur if B and C are constants.
3301
3302 If this optimization cannot be done, 0 will be returned. */
3303
3304 static tree
3305 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3306 tree arg0, tree arg1)
3307 {
3308 tree common;
3309 tree left, right;
3310
3311 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3312 || TREE_CODE (arg0) == code
3313 || (TREE_CODE (arg0) != BIT_AND_EXPR
3314 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3315 return 0;
3316
3317 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3318 {
3319 common = TREE_OPERAND (arg0, 0);
3320 left = TREE_OPERAND (arg0, 1);
3321 right = TREE_OPERAND (arg1, 1);
3322 }
3323 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3324 {
3325 common = TREE_OPERAND (arg0, 0);
3326 left = TREE_OPERAND (arg0, 1);
3327 right = TREE_OPERAND (arg1, 0);
3328 }
3329 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3330 {
3331 common = TREE_OPERAND (arg0, 1);
3332 left = TREE_OPERAND (arg0, 0);
3333 right = TREE_OPERAND (arg1, 1);
3334 }
3335 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3336 {
3337 common = TREE_OPERAND (arg0, 1);
3338 left = TREE_OPERAND (arg0, 0);
3339 right = TREE_OPERAND (arg1, 0);
3340 }
3341 else
3342 return 0;
3343
3344 common = fold_convert_loc (loc, type, common);
3345 left = fold_convert_loc (loc, type, left);
3346 right = fold_convert_loc (loc, type, right);
3347 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3348 fold_build2_loc (loc, code, type, left, right));
3349 }
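
/* A worked example (for illustration): for (x | 4) & (x | 1) the common
   operand is X, so the result is built as x | (4 & 1); the inner
   fold_build2_loc call reduces 4 & 1 to 0, leaving x | 0 for further
   simplification.  */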
3350
3351 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3352 with code CODE. This optimization is unsafe. */
3353 static tree
3354 distribute_real_division (location_t loc, enum tree_code code, tree type,
3355 tree arg0, tree arg1)
3356 {
3357 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3358 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3359
3360 /* (A / C) +- (B / C) -> (A +- B) / C. */
3361 if (mul0 == mul1
3362 && operand_equal_p (TREE_OPERAND (arg0, 1),
3363 TREE_OPERAND (arg1, 1), 0))
3364 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3365 fold_build2_loc (loc, code, type,
3366 TREE_OPERAND (arg0, 0),
3367 TREE_OPERAND (arg1, 0)),
3368 TREE_OPERAND (arg0, 1));
3369
3370 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3371 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3372 TREE_OPERAND (arg1, 0), 0)
3373 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3374 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3375 {
3376 REAL_VALUE_TYPE r0, r1;
3377 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3378 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3379 if (!mul0)
3380 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3381 if (!mul1)
3382 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3383 real_arithmetic (&r0, code, &r0, &r1);
3384 return fold_build2_loc (loc, MULT_EXPR, type,
3385 TREE_OPERAND (arg0, 0),
3386 build_real (type, r0));
3387 }
3388
3389 return NULL_TREE;
3390 }
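
/* For illustration: the second transformation rewrites
   a / 4.0 + a / 5.0 as a * C, where C holds the compile-time value of
   1/4.0 + 1/5.0.  C is rounded once at compile time, so the product
   can differ in the last bits from the two original divisions -- the
   sense in which the comment above calls this unsafe.  */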
3391 \f
3392 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3393 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3394
3395 static tree
3396 make_bit_field_ref (location_t loc, tree inner, tree type,
3397 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3398 {
3399 tree result, bftype;
3400
3401 if (bitpos == 0)
3402 {
3403 tree size = TYPE_SIZE (TREE_TYPE (inner));
3404 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3405 || POINTER_TYPE_P (TREE_TYPE (inner)))
3406 && tree_fits_shwi_p (size)
3407 && tree_to_shwi (size) == bitsize)
3408 return fold_convert_loc (loc, type, inner);
3409 }
3410
3411 bftype = type;
3412 if (TYPE_PRECISION (bftype) != bitsize
3413 || TYPE_UNSIGNED (bftype) == !unsignedp)
3414 bftype = build_nonstandard_integer_type (bitsize, 0);
3415
3416 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3417 size_int (bitsize), bitsize_int (bitpos));
3418
3419 if (bftype != type)
3420 result = fold_convert_loc (loc, type, result);
3421
3422 return result;
3423 }
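
/* For illustration: a request for all 32 bits of a 32-bit integer at
   bit position 0 is not really an extraction, so the early-out above
   degenerates into a plain conversion; a genuine subfield request such
   as 8 bits at position 16 yields BIT_FIELD_REF <inner, 8, 16>, wrapped
   in a conversion when TYPE differs in precision or signedness.  */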
3424
3425 /* Optimize a bit-field compare.
3426
3427 There are two cases: First is a compare against a constant and the
3428 second is a comparison of two items where the fields are at the same
3429 bit position relative to the start of a chunk (byte, halfword, word)
3430 large enough to contain it. In these cases we can avoid the shift
3431 implicit in bitfield extractions.
3432
3433 For constants, we emit a compare of the shifted constant with the
3434 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3435 compared. For two fields at the same position, we do the ANDs with the
3436 similar mask and compare the result of the ANDs.
3437
3438 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3439 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3440 are the left and right operands of the comparison, respectively.
3441
3442 If the optimization described above can be done, we return the resulting
3443 tree. Otherwise we return zero. */
3444
3445 static tree
3446 optimize_bit_field_compare (location_t loc, enum tree_code code,
3447 tree compare_type, tree lhs, tree rhs)
3448 {
3449 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3450 tree type = TREE_TYPE (lhs);
3451 tree signed_type, unsigned_type;
3452 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3453 enum machine_mode lmode, rmode, nmode;
3454 int lunsignedp, runsignedp;
3455 int lvolatilep = 0, rvolatilep = 0;
3456 tree linner, rinner = NULL_TREE;
3457 tree mask;
3458 tree offset;
3459
3460 /* Get all the information about the extractions being done. If the bit size
3461 is the same as the size of the underlying object, we aren't doing an
3462 extraction at all and so can do nothing. We also don't want to
3463 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3464 then will no longer be able to replace it. */
3465 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3466 &lunsignedp, &lvolatilep, false);
3467 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3468 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3469 return 0;
3470
3471 if (!const_p)
3472 {
3473 /* If this is not a constant, we can only do something if bit positions,
3474 sizes, and signedness are the same. */
3475 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3476 &runsignedp, &rvolatilep, false);
3477
3478 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3479 || lunsignedp != runsignedp || offset != 0
3480 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3481 return 0;
3482 }
3483
3484 /* See if we can find a mode to refer to this field. We should be able to,
3485 but fail if we can't. */
3486 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3487 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3488 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3489 TYPE_ALIGN (TREE_TYPE (rinner))),
3490 word_mode, false);
3491 if (nmode == VOIDmode)
3492 return 0;
3493
3494 /* Set signed and unsigned types of the precision of this mode for the
3495 shifts below. */
3496 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3497 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3498
3499 /* Compute the bit position and size for the new reference and our offset
3500 within it. If the new reference is the same size as the original, we
3501 won't optimize anything, so return zero. */
3502 nbitsize = GET_MODE_BITSIZE (nmode);
3503 nbitpos = lbitpos & ~ (nbitsize - 1);
3504 lbitpos -= nbitpos;
3505 if (nbitsize == lbitsize)
3506 return 0;
3507
3508 if (BYTES_BIG_ENDIAN)
3509 lbitpos = nbitsize - lbitsize - lbitpos;
3510
3511 /* Make the mask to be used against the extracted field. */
3512 mask = build_int_cst_type (unsigned_type, -1);
3513 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3514 mask = const_binop (RSHIFT_EXPR, mask,
3515 size_int (nbitsize - lbitsize - lbitpos));
3516
3517 if (! const_p)
3518 /* If not comparing with a constant, just rework the comparison
3519 and return. */
3520 return fold_build2_loc (loc, code, compare_type,
3521 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3522 make_bit_field_ref (loc, linner,
3523 unsigned_type,
3524 nbitsize, nbitpos,
3525 1),
3526 mask),
3527 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3528 make_bit_field_ref (loc, rinner,
3529 unsigned_type,
3530 nbitsize, nbitpos,
3531 1),
3532 mask));
3533
3534 /* Otherwise, we are handling the constant case. See if the constant is too
3535 big for the field. Warn and return a tree for 0 (false) if so. We do
3536 this not only for its own sake, but to avoid having to test for this
3537 error case below. If we didn't, we might generate wrong code.
3538
3539 For unsigned fields, the constant shifted right by the field length should
3540 be all zero. For signed fields, the high-order bits should agree with
3541 the sign bit. */
3542
3543 if (lunsignedp)
3544 {
3545 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3546 fold_convert_loc (loc,
3547 unsigned_type, rhs),
3548 size_int (lbitsize))))
3549 {
3550 warning (0, "comparison is always %d due to width of bit-field",
3551 code == NE_EXPR);
3552 return constant_boolean_node (code == NE_EXPR, compare_type);
3553 }
3554 }
3555 else
3556 {
3557 tree tem = const_binop (RSHIFT_EXPR,
3558 fold_convert_loc (loc, signed_type, rhs),
3559 size_int (lbitsize - 1));
3560 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3561 {
3562 warning (0, "comparison is always %d due to width of bit-field",
3563 code == NE_EXPR);
3564 return constant_boolean_node (code == NE_EXPR, compare_type);
3565 }
3566 }
3567
3568 /* Single-bit compares should always be against zero. */
3569 if (lbitsize == 1 && ! integer_zerop (rhs))
3570 {
3571 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3572 rhs = build_int_cst (type, 0);
3573 }
3574
3575 /* Make a new bitfield reference, shift the constant over the
3576 appropriate number of bits and mask it with the computed mask
3577 (in case this was a signed field). If we changed it, make a new one. */
3578 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3579
3580 rhs = const_binop (BIT_AND_EXPR,
3581 const_binop (LSHIFT_EXPR,
3582 fold_convert_loc (loc, unsigned_type, rhs),
3583 size_int (lbitpos)),
3584 mask);
3585
3586 lhs = build2_loc (loc, code, compare_type,
3587 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3588 return lhs;
3589 }
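
/* A worked sketch (hypothetical layout, for illustration): given
   struct s { unsigned a : 3; unsigned b : 5; } x; the test x.b == 7
   can become, on a little-endian target where both fields share one
   byte, (w & 0xf8) == (7 << 3) with w a byte-sized load -- a single
   masked compare instead of a shift-and-extract of the bit-field.  */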
3590 \f
3591 /* Subroutine for fold_truth_andor_1: decode a field reference.
3592
3593 If EXP is a comparison reference, we return the innermost reference.
3594
3595 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3596 set to the starting bit number.
3597
3598 If the innermost field can be completely contained in a mode-sized
3599 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3600
3601 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3602 otherwise it is not changed.
3603
3604 *PUNSIGNEDP is set to the signedness of the field.
3605
3606 *PMASK is set to the mask used. This is either contained in a
3607 BIT_AND_EXPR or derived from the width of the field.
3608
3609 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3610
3611 Return 0 if this is not a component reference or is one that we can't
3612 do anything with. */
3613
3614 static tree
3615 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3616 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3617 int *punsignedp, int *pvolatilep,
3618 tree *pmask, tree *pand_mask)
3619 {
3620 tree outer_type = 0;
3621 tree and_mask = 0;
3622 tree mask, inner, offset;
3623 tree unsigned_type;
3624 unsigned int precision;
3625
3626 /* All the optimizations using this function assume integer fields.
3627 There are problems with FP fields since the type_for_size call
3628 below can fail for, e.g., XFmode. */
3629 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3630 return 0;
3631
3632 /* We are interested in the bare arrangement of bits, so strip everything
3633 that doesn't affect the machine mode. However, record the type of the
3634 outermost expression if it may matter below. */
3635 if (CONVERT_EXPR_P (exp)
3636 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3637 outer_type = TREE_TYPE (exp);
3638 STRIP_NOPS (exp);
3639
3640 if (TREE_CODE (exp) == BIT_AND_EXPR)
3641 {
3642 and_mask = TREE_OPERAND (exp, 1);
3643 exp = TREE_OPERAND (exp, 0);
3644 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3645 if (TREE_CODE (and_mask) != INTEGER_CST)
3646 return 0;
3647 }
3648
3649 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3650 punsignedp, pvolatilep, false);
3651 if ((inner == exp && and_mask == 0)
3652 || *pbitsize < 0 || offset != 0
3653 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3654 return 0;
3655
3656 /* If the number of bits in the reference is the same as the bitsize of
3657 the outer type, then the outer type gives the signedness. Otherwise
3658 (in case of a small bitfield) the signedness is unchanged. */
3659 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3660 *punsignedp = TYPE_UNSIGNED (outer_type);
3661
3662 /* Compute the mask to access the bitfield. */
3663 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3664 precision = TYPE_PRECISION (unsigned_type);
3665
3666 mask = build_int_cst_type (unsigned_type, -1);
3667
3668 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3669 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3670
3671 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3672 if (and_mask != 0)
3673 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3674 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3675
3676 *pmask = mask;
3677 *pand_mask = and_mask;
3678 return inner;
3679 }
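
/* For example, given

     struct s { unsigned f : 3; } x;

   decoding the left-hand side of "(x.f & 5) == 4" returns the innermost
   reference to x, sets *PBITSIZE to 3, *PAND_MASK to 5, and *PMASK to the
   mask of 3 low-order ones merged with 5 (i.e. 5), assuming the field is
   byte-aligned. */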
3680
3681 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3682 bit positions and the type of MASK is signed. */
3683
3684 static int
3685 all_ones_mask_p (const_tree mask, unsigned int size)
3686 {
3687 tree type = TREE_TYPE (mask);
3688 unsigned int precision = TYPE_PRECISION (type);
3689
3690 /* If this function returns true when the type of the mask is
3691 UNSIGNED, then there will be errors. In particular see
3692 gcc.c-torture/execute/990326-1.c. There does not appear to be
3693 any documentation paper trail as to why this is so. But the
3694 pre-wide-int code worked with that restriction and it has been preserved
3695 here. */
3696 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3697 return false;
3698
3699 return wi::mask (size, false, precision) == mask;
3700 }
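
/* E.g. a mask with value 15 in a signed type and SIZE == 4 satisfies
   this predicate; the same value in an unsigned type does not, per
   the restriction above. */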
3701
3702 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3703 represents the sign bit of EXP's type. If EXP represents a sign
3704 or zero extension, also test VAL against the unextended type.
3705 The return value is the (sub)expression whose sign bit is VAL,
3706 or NULL_TREE otherwise. */
3707
3708 static tree
3709 sign_bit_p (tree exp, const_tree val)
3710 {
3711 int width;
3712 tree t;
3713
3714 /* Tree EXP must have an integral type. */
3715 t = TREE_TYPE (exp);
3716 if (! INTEGRAL_TYPE_P (t))
3717 return NULL_TREE;
3718
3719 /* Tree VAL must be an integer constant. */
3720 if (TREE_CODE (val) != INTEGER_CST
3721 || TREE_OVERFLOW (val))
3722 return NULL_TREE;
3723
3724 width = TYPE_PRECISION (t);
3725 if (wi::only_sign_bit_p (val, width))
3726 return exp;
3727
3728 /* Handle extension from a narrower type. */
3729 if (TREE_CODE (exp) == NOP_EXPR
3730 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3731 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3732
3733 return NULL_TREE;
3734 }
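
/* E.g. for a 32-bit signed EXP, only VAL == 0x80000000 (the sign bit
   alone) makes this return EXP; 0x40000000 or 0xc0000000 do not. */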
3735
3736 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3737 to be evaluated unconditionally. */
3738
3739 static int
3740 simple_operand_p (const_tree exp)
3741 {
3742 /* Strip any conversions that don't change the machine mode. */
3743 STRIP_NOPS (exp);
3744
3745 return (CONSTANT_CLASS_P (exp)
3746 || TREE_CODE (exp) == SSA_NAME
3747 || (DECL_P (exp)
3748 && ! TREE_ADDRESSABLE (exp)
3749 && ! TREE_THIS_VOLATILE (exp)
3750 && ! DECL_NONLOCAL (exp)
3751 /* Don't regard global variables as simple. They may be
3752 allocated in ways unknown to the compiler (shared memory,
3753 #pragma weak, etc). */
3754 && ! TREE_PUBLIC (exp)
3755 && ! DECL_EXTERNAL (exp)
3756 /* Weakrefs are not safe to be read, since they can be NULL.
3757 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3758 have DECL_WEAK flag set. */
3759 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3760 /* Loading a static variable is unduly expensive, but global
3761 registers aren't expensive. */
3762 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3763 }
3764
3765 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3766 to be evaluated unconditionally.
3767 In addition to simple_operand_p, we assume that comparisons, conversions,
3768 and logic-not operations are simple, if their operands are simple, too. */
3769
3770 static bool
3771 simple_operand_p_2 (tree exp)
3772 {
3773 enum tree_code code;
3774
3775 if (TREE_SIDE_EFFECTS (exp)
3776 || tree_could_trap_p (exp))
3777 return false;
3778
3779 while (CONVERT_EXPR_P (exp))
3780 exp = TREE_OPERAND (exp, 0);
3781
3782 code = TREE_CODE (exp);
3783
3784 if (TREE_CODE_CLASS (code) == tcc_comparison)
3785 return (simple_operand_p (TREE_OPERAND (exp, 0))
3786 && simple_operand_p (TREE_OPERAND (exp, 1)));
3787
3788 if (code == TRUTH_NOT_EXPR)
3789 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3790
3791 return simple_operand_p (exp);
3792 }
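
/* E.g. "(int) x == 0" is simple when x is a non-volatile, non-weak
   local; "*p == 0" is not, because *p is neither a constant, an
   SSA name, nor a suitable decl. */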
3793
3794 \f
3795 /* The following functions are subroutines to fold_range_test and allow it to
3796 try to change a logical combination of comparisons into a range test.
3797
3798 For example, both
3799 X == 2 || X == 3 || X == 4 || X == 5
3800 and
3801 X >= 2 && X <= 5
3802 are converted to
3803 (unsigned) (X - 2) <= 3
3804
3805 We describe each set of comparisons as being either inside or outside
3806 a range, using a variable named like IN_P, and then describe the
3807 range with a lower and upper bound. If one of the bounds is omitted,
3808 it represents either the highest or lowest value of the type.
3809
3810 In the comments below, we represent a range by two numbers in brackets
3811 preceded by a "+" to designate being inside that range, or a "-" to
3812 designate being outside that range, so the condition can be inverted by
3813 flipping the prefix. An omitted bound is represented by a "-". For
3814 example, "- [-, 10]" means being outside the range starting at the lowest
3815 possible value and ending at 10, in other words, being greater than 10.
3816 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3817 always false.
3818
3819 We set up things so that the missing bounds are handled in a consistent
3820 manner so neither a missing bound nor "true" and "false" need to be
3821 handled using a special case. */
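
/* To see why the canonical form works, take the "+ [2, 5]" example
   above: rewriting it as (unsigned) (X - 2) <= 3 maps 2..5 onto
   0..3, while every X outside [2, 5] either wraps around to a huge
   unsigned value or exceeds 3, so one unsigned comparison tests both
   bounds at once. */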
3822
3823 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3824 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3825 and UPPER1_P are nonzero if the respective argument is an upper bound
3826 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3827 must be specified for a comparison. ARG1 will be converted to ARG0's
3828 type if both are specified. */
3829
3830 static tree
3831 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3832 tree arg1, int upper1_p)
3833 {
3834 tree tem;
3835 int result;
3836 int sgn0, sgn1;
3837
3838 /* If neither arg represents infinity, do the normal operation.
3839 Else, if not a comparison, return infinity. Else handle the special
3840 comparison rules. Note that most of the cases below won't occur, but
3841 are handled for consistency. */
3842
3843 if (arg0 != 0 && arg1 != 0)
3844 {
3845 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3846 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3847 STRIP_NOPS (tem);
3848 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3849 }
3850
3851 if (TREE_CODE_CLASS (code) != tcc_comparison)
3852 return 0;
3853
3854 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3855 for neither. In real mathematics we could not assume open-ended
3856 ranges compare equal. But this is computer arithmetic, where numbers
3857 are finite, so we can model any missing bound by a value Z greater
3858 than any representable number. This permits us to treat unbounded
3859 ranges as equal. */
3860 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3861 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3862 switch (code)
3863 {
3864 case EQ_EXPR:
3865 result = sgn0 == sgn1;
3866 break;
3867 case NE_EXPR:
3868 result = sgn0 != sgn1;
3869 break;
3870 case LT_EXPR:
3871 result = sgn0 < sgn1;
3872 break;
3873 case LE_EXPR:
3874 result = sgn0 <= sgn1;
3875 break;
3876 case GT_EXPR:
3877 result = sgn0 > sgn1;
3878 break;
3879 case GE_EXPR:
3880 result = sgn0 >= sgn1;
3881 break;
3882 default:
3883 gcc_unreachable ();
3884 }
3885
3886 return constant_boolean_node (result, type);
3887 }
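
/* For instance, range_binop (LT_EXPR, type, NULL_TREE, 0, c, 1)
   compares a missing lower bound (acting as -Z) against an upper
   bound C, so the result is true regardless of the value of C. */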
3888 \f
3889 /* Helper routine for make_range. Perform one step for it, return
3890 new expression if the loop should continue or NULL_TREE if it should
3891 stop. */
3892
3893 tree
3894 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3895 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3896 bool *strict_overflow_p)
3897 {
3898 tree arg0_type = TREE_TYPE (arg0);
3899 tree n_low, n_high, low = *p_low, high = *p_high;
3900 int in_p = *p_in_p, n_in_p;
3901
3902 switch (code)
3903 {
3904 case TRUTH_NOT_EXPR:
3905 /* We can only do something if the range is testing for zero. */
3906 if (low == NULL_TREE || high == NULL_TREE
3907 || ! integer_zerop (low) || ! integer_zerop (high))
3908 return NULL_TREE;
3909 *p_in_p = ! in_p;
3910 return arg0;
3911
3912 case EQ_EXPR: case NE_EXPR:
3913 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3914 /* We can only do something if the range is testing for zero
3915 and if the second operand is an integer constant. Note that
3916 saying something is "in" the range we make is done by
3917 complementing IN_P, since it is initially set up for the case of
3918 being not equal to zero; "out" is leaving it alone.
3919 if (low == NULL_TREE || high == NULL_TREE
3920 || ! integer_zerop (low) || ! integer_zerop (high)
3921 || TREE_CODE (arg1) != INTEGER_CST)
3922 return NULL_TREE;
3923
3924 switch (code)
3925 {
3926 case NE_EXPR: /* - [c, c] */
3927 low = high = arg1;
3928 break;
3929 case EQ_EXPR: /* + [c, c] */
3930 in_p = ! in_p, low = high = arg1;
3931 break;
3932 case GT_EXPR: /* - [-, c] */
3933 low = 0, high = arg1;
3934 break;
3935 case GE_EXPR: /* + [c, -] */
3936 in_p = ! in_p, low = arg1, high = 0;
3937 break;
3938 case LT_EXPR: /* - [c, -] */
3939 low = arg1, high = 0;
3940 break;
3941 case LE_EXPR: /* + [-, c] */
3942 in_p = ! in_p, low = 0, high = arg1;
3943 break;
3944 default:
3945 gcc_unreachable ();
3946 }
3947
3948 /* If this is an unsigned comparison, we also know that EXP is
3949 greater than or equal to zero. We base the range tests we make
3950 on that fact, so we record it here so we can parse existing
3951 range tests. We test arg0_type since often the return type
3952 of, e.g. EQ_EXPR, is boolean. */
3953 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3954 {
3955 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3956 in_p, low, high, 1,
3957 build_int_cst (arg0_type, 0),
3958 NULL_TREE))
3959 return NULL_TREE;
3960
3961 in_p = n_in_p, low = n_low, high = n_high;
3962
3963 /* If the high bound is missing, but we have a nonzero low
3964 bound, reverse the range so it goes from zero to the low bound
3965 minus 1. */
3966 if (high == 0 && low && ! integer_zerop (low))
3967 {
3968 in_p = ! in_p;
3969 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3970 build_int_cst (TREE_TYPE (low), 1), 0);
3971 low = build_int_cst (arg0_type, 0);
3972 }
3973 }
3974
3975 *p_low = low;
3976 *p_high = high;
3977 *p_in_p = in_p;
3978 return arg0;
3979
3980 case NEGATE_EXPR:
3981 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3982 low and high are non-NULL, then normalize will DTRT. */
3983 if (!TYPE_UNSIGNED (arg0_type)
3984 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3985 {
3986 if (low == NULL_TREE)
3987 low = TYPE_MIN_VALUE (arg0_type);
3988 if (high == NULL_TREE)
3989 high = TYPE_MAX_VALUE (arg0_type);
3990 }
3991
3992 /* (-x) IN [a,b] -> x in [-b, -a] */
3993 n_low = range_binop (MINUS_EXPR, exp_type,
3994 build_int_cst (exp_type, 0),
3995 0, high, 1);
3996 n_high = range_binop (MINUS_EXPR, exp_type,
3997 build_int_cst (exp_type, 0),
3998 0, low, 0);
3999 if (n_high != 0 && TREE_OVERFLOW (n_high))
4000 return NULL_TREE;
4001 goto normalize;
4002
4003 case BIT_NOT_EXPR:
4004 /* ~ X -> -X - 1 */
4005 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4006 build_int_cst (exp_type, 1));
4007
4008 case PLUS_EXPR:
4009 case MINUS_EXPR:
4010 if (TREE_CODE (arg1) != INTEGER_CST)
4011 return NULL_TREE;
4012
4013 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4014 move a constant to the other side. */
4015 if (!TYPE_UNSIGNED (arg0_type)
4016 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4017 return NULL_TREE;
4018
4019 /* If EXP is signed, any overflow in the computation is undefined,
4020 so we don't worry about it so long as our computations on
4021 the bounds don't overflow. For unsigned, overflow is defined
4022 and this is exactly the right thing. */
4023 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4024 arg0_type, low, 0, arg1, 0);
4025 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4026 arg0_type, high, 1, arg1, 0);
4027 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4028 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4029 return NULL_TREE;
4030
4031 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4032 *strict_overflow_p = true;
4033
4034 normalize:
4035 /* Check for an unsigned range which has wrapped around the maximum
4036 value thus making n_high < n_low, and normalize it. */
4037 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4038 {
4039 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4040 build_int_cst (TREE_TYPE (n_high), 1), 0);
4041 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4042 build_int_cst (TREE_TYPE (n_low), 1), 0);
4043
4044 /* If the range is of the form +/- [ x+1, x ], we won't
4045 be able to normalize it. But then, it represents the
4046 whole range or the empty set, so make it
4047 +/- [ -, - ]. */
4048 if (tree_int_cst_equal (n_low, low)
4049 && tree_int_cst_equal (n_high, high))
4050 low = high = 0;
4051 else
4052 in_p = ! in_p;
4053 }
4054 else
4055 low = n_low, high = n_high;
4056
4057 *p_low = low;
4058 *p_high = high;
4059 *p_in_p = in_p;
4060 return arg0;
4061
4062 CASE_CONVERT:
4063 case NON_LVALUE_EXPR:
4064 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4065 return NULL_TREE;
4066
4067 if (! INTEGRAL_TYPE_P (arg0_type)
4068 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4069 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4070 return NULL_TREE;
4071
4072 n_low = low, n_high = high;
4073
4074 if (n_low != 0)
4075 n_low = fold_convert_loc (loc, arg0_type, n_low);
4076
4077 if (n_high != 0)
4078 n_high = fold_convert_loc (loc, arg0_type, n_high);
4079
4080 /* If we're converting arg0 from an unsigned type to exp's
4081 signed type, we will be doing the comparison as unsigned.
4082 The tests above have already verified that LOW and HIGH
4083 are both positive.
4084
4085 So we have to ensure that we will handle large unsigned
4086 values the same way that the current signed bounds treat
4087 negative values. */
4088
4089 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4090 {
4091 tree high_positive;
4092 tree equiv_type;
4093 /* For fixed-point modes, we need to pass the saturating flag
4094 as the 2nd parameter. */
4095 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4096 equiv_type
4097 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4098 TYPE_SATURATING (arg0_type));
4099 else
4100 equiv_type
4101 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4102
4103 /* A range without an upper bound is, naturally, unbounded.
4104 Since convert would have cropped a very large value, use
4105 the max value for the destination type. */
4106 high_positive
4107 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4108 : TYPE_MAX_VALUE (arg0_type);
4109
4110 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4111 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4112 fold_convert_loc (loc, arg0_type,
4113 high_positive),
4114 build_int_cst (arg0_type, 1));
4115
4116 /* If the low bound is specified, "and" the range with the
4117 range for which the original unsigned value will be
4118 positive. */
4119 if (low != 0)
4120 {
4121 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4122 1, fold_convert_loc (loc, arg0_type,
4123 integer_zero_node),
4124 high_positive))
4125 return NULL_TREE;
4126
4127 in_p = (n_in_p == in_p);
4128 }
4129 else
4130 {
4131 /* Otherwise, "or" the range with the range of the input
4132 that will be interpreted as negative. */
4133 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4134 1, fold_convert_loc (loc, arg0_type,
4135 integer_zero_node),
4136 high_positive))
4137 return NULL_TREE;
4138
4139 in_p = (in_p != n_in_p);
4140 }
4141 }
4142
4143 *p_low = n_low;
4144 *p_high = n_high;
4145 *p_in_p = in_p;
4146 return arg0;
4147
4148 default:
4149 return NULL_TREE;
4150 }
4151 }
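
/* As an example of the normalization above: if, for unsigned char,
   the adjusted bounds come out wrapped, say N_LOW == 250 and
   N_HIGH == 5 (the set {250..255, 0..5}), the range is replaced by
   its complement [6, 249] and IN_P is inverted. */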
4152
4153 /* Given EXP, a logical expression, set the range it is testing into
4154 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4155 actually being tested. *PLOW and *PHIGH will be made of the same
4156 type as the returned expression. If EXP is not a comparison, we
4157 will most likely not be returning a useful value and range. Set
4158 *STRICT_OVERFLOW_P to true if the return value is only valid
4159 because signed overflow is undefined; otherwise, do not change
4160 *STRICT_OVERFLOW_P. */
4161
4162 tree
4163 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4164 bool *strict_overflow_p)
4165 {
4166 enum tree_code code;
4167 tree arg0, arg1 = NULL_TREE;
4168 tree exp_type, nexp;
4169 int in_p;
4170 tree low, high;
4171 location_t loc = EXPR_LOCATION (exp);
4172
4173 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4174 and see if we can refine the range. Some of the cases below may not
4175 happen, but it doesn't seem worth worrying about this. We keep
4176 iterating as long as make_range_step can refine the range, and stop
4177 as soon as it cannot. */
4178
4179 in_p = 0;
4180 low = high = build_int_cst (TREE_TYPE (exp), 0);
4181
4182 while (1)
4183 {
4184 code = TREE_CODE (exp);
4185 exp_type = TREE_TYPE (exp);
4186 arg0 = NULL_TREE;
4187
4188 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4189 {
4190 if (TREE_OPERAND_LENGTH (exp) > 0)
4191 arg0 = TREE_OPERAND (exp, 0);
4192 if (TREE_CODE_CLASS (code) == tcc_binary
4193 || TREE_CODE_CLASS (code) == tcc_comparison
4194 || (TREE_CODE_CLASS (code) == tcc_expression
4195 && TREE_OPERAND_LENGTH (exp) > 1))
4196 arg1 = TREE_OPERAND (exp, 1);
4197 }
4198 if (arg0 == NULL_TREE)
4199 break;
4200
4201 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4202 &high, &in_p, strict_overflow_p);
4203 if (nexp == NULL_TREE)
4204 break;
4205 exp = nexp;
4206 }
4207
4208 /* If EXP is a constant, we can evaluate whether this is true or false. */
4209 if (TREE_CODE (exp) == INTEGER_CST)
4210 {
4211 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4212 exp, 0, low, 0))
4213 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4214 exp, 1, high, 1)));
4215 low = high = 0;
4216 exp = 0;
4217 }
4218
4219 *pin_p = in_p, *plow = low, *phigh = high;
4220 return exp;
4221 }
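
/* E.g. for EXP "a > 5" with a signed a, this returns "a" with
   *PIN_P == 0, *PLOW == NULL_TREE (no lower bound) and *PHIGH == 5,
   i.e. a lies outside - [-, 5]. */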
4222 \f
4223 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4224 type, TYPE, return an expression to test if EXP is in (or out of, depending
4225 on IN_P) the range. Return 0 if the test couldn't be created. */
4226
4227 tree
4228 build_range_check (location_t loc, tree type, tree exp, int in_p,
4229 tree low, tree high)
4230 {
4231 tree etype = TREE_TYPE (exp), value;
4232
4233 #ifdef HAVE_canonicalize_funcptr_for_compare
4234 /* Disable this optimization for function pointer expressions
4235 on targets that require function pointer canonicalization. */
4236 if (HAVE_canonicalize_funcptr_for_compare
4237 && TREE_CODE (etype) == POINTER_TYPE
4238 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4239 return NULL_TREE;
4240 #endif
4241
4242 if (! in_p)
4243 {
4244 value = build_range_check (loc, type, exp, 1, low, high);
4245 if (value != 0)
4246 return invert_truthvalue_loc (loc, value);
4247
4248 return 0;
4249 }
4250
4251 if (low == 0 && high == 0)
4252 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4253
4254 if (low == 0)
4255 return fold_build2_loc (loc, LE_EXPR, type, exp,
4256 fold_convert_loc (loc, etype, high));
4257
4258 if (high == 0)
4259 return fold_build2_loc (loc, GE_EXPR, type, exp,
4260 fold_convert_loc (loc, etype, low));
4261
4262 if (operand_equal_p (low, high, 0))
4263 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4264 fold_convert_loc (loc, etype, low));
4265
4266 if (integer_zerop (low))
4267 {
4268 if (! TYPE_UNSIGNED (etype))
4269 {
4270 etype = unsigned_type_for (etype);
4271 high = fold_convert_loc (loc, etype, high);
4272 exp = fold_convert_loc (loc, etype, exp);
4273 }
4274 return build_range_check (loc, type, exp, 1, 0, high);
4275 }
4276
4277 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4278 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4279 {
4280 int prec = TYPE_PRECISION (etype);
4281 wide_int osb = wi::set_bit_in_zero (prec - 1, prec) - 1;
4282
4283 if (osb == high)
4284 {
4285 if (TYPE_UNSIGNED (etype))
4286 {
4287 tree signed_etype = signed_type_for (etype);
4288 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4289 etype
4290 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4291 else
4292 etype = signed_etype;
4293 exp = fold_convert_loc (loc, etype, exp);
4294 }
4295 return fold_build2_loc (loc, GT_EXPR, type, exp,
4296 build_int_cst (etype, 0));
4297 }
4298 }
4299
4300 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4301 This requires wrap-around arithmetic for the type of the expression.
4302 First make sure that arithmetic in this type is valid, then make sure
4303 that it wraps around. */
4304 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4305 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4306 TYPE_UNSIGNED (etype));
4307
4308 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4309 {
4310 tree utype, minv, maxv;
4311
4312 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4313 for the type in question, as we rely on this here. */
4314 utype = unsigned_type_for (etype);
4315 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4316 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4317 build_int_cst (TREE_TYPE (maxv), 1), 1);
4318 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4319
4320 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4321 minv, 1, maxv, 1)))
4322 etype = utype;
4323 else
4324 return 0;
4325 }
4326
4327 high = fold_convert_loc (loc, etype, high);
4328 low = fold_convert_loc (loc, etype, low);
4329 exp = fold_convert_loc (loc, etype, exp);
4330
4331 value = const_binop (MINUS_EXPR, high, low);
4332
4333
4334 if (POINTER_TYPE_P (etype))
4335 {
4336 if (value != 0 && !TREE_OVERFLOW (value))
4337 {
4338 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4339 return build_range_check (loc, type,
4340 fold_build_pointer_plus_loc (loc, exp, low),
4341 1, build_int_cst (etype, 0), value);
4342 }
4343 return 0;
4344 }
4345
4346 if (value != 0 && !TREE_OVERFLOW (value))
4347 return build_range_check (loc, type,
4348 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4349 1, build_int_cst (etype, 0), value);
4350
4351 return 0;
4352 }
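
/* E.g. testing that a signed char C lies in [1, 127] hits the
   special case above and is emitted as the single comparison C > 0,
   while a generic [LOW, HIGH] test becomes, after the final
   recursion, (unsigned) (EXP - LOW) <= (unsigned) (HIGH - LOW). */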
4353 \f
4354 /* Return the predecessor of VAL in its type, handling the infinite case. */
4355
4356 static tree
4357 range_predecessor (tree val)
4358 {
4359 tree type = TREE_TYPE (val);
4360
4361 if (INTEGRAL_TYPE_P (type)
4362 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4363 return 0;
4364 else
4365 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4366 build_int_cst (TREE_TYPE (val), 1), 0);
4367 }
4368
4369 /* Return the successor of VAL in its type, handling the infinite case. */
4370
4371 static tree
4372 range_successor (tree val)
4373 {
4374 tree type = TREE_TYPE (val);
4375
4376 if (INTEGRAL_TYPE_P (type)
4377 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4378 return 0;
4379 else
4380 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4381 build_int_cst (TREE_TYPE (val), 1), 0);
4382 }
4383
4384 /* Given two ranges, see if we can merge them into one. Return 1 if we
4385 can, 0 if we can't. Set the output range into the specified parameters. */
4386
4387 bool
4388 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4389 tree high0, int in1_p, tree low1, tree high1)
4390 {
4391 int no_overlap;
4392 int subset;
4393 int temp;
4394 tree tem;
4395 int in_p;
4396 tree low, high;
4397 int lowequal = ((low0 == 0 && low1 == 0)
4398 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4399 low0, 0, low1, 0)));
4400 int highequal = ((high0 == 0 && high1 == 0)
4401 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4402 high0, 1, high1, 1)));
4403
4404 /* Make range 0 be the range that starts first, or ends last if they
4405 start at the same value. Swap them if range 0 is not already first. */
4406 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4407 low0, 0, low1, 0))
4408 || (lowequal
4409 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4410 high1, 1, high0, 1))))
4411 {
4412 temp = in0_p, in0_p = in1_p, in1_p = temp;
4413 tem = low0, low0 = low1, low1 = tem;
4414 tem = high0, high0 = high1, high1 = tem;
4415 }
4416
4417 /* Now flag two cases, whether the ranges are disjoint or whether the
4418 second range is totally subsumed in the first. Note that the tests
4419 below are simplified by the ones above. */
4420 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4421 high0, 1, low1, 0));
4422 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4423 high1, 1, high0, 1));
4424
4425 /* We now have four cases, depending on whether we are including or
4426 excluding the two ranges. */
4427 if (in0_p && in1_p)
4428 {
4429 /* If they don't overlap, the result is false. If the second range
4430 is a subset it is the result. Otherwise, the range is from the start
4431 of the second to the end of the first. */
4432 if (no_overlap)
4433 in_p = 0, low = high = 0;
4434 else if (subset)
4435 in_p = 1, low = low1, high = high1;
4436 else
4437 in_p = 1, low = low1, high = high0;
4438 }
4439
4440 else if (in0_p && ! in1_p)
4441 {
4442 /* If they don't overlap, the result is the first range. If they are
4443 equal, the result is false. If the second range is a subset of the
4444 first, and the ranges begin at the same place, we go from just after
4445 the end of the second range to the end of the first. If the second
4446 range is not a subset of the first, or if it is a subset and both
4447 ranges end at the same place, the range starts at the start of the
4448 first range and ends just before the second range.
4449 Otherwise, we can't describe this as a single range. */
4450 if (no_overlap)
4451 in_p = 1, low = low0, high = high0;
4452 else if (lowequal && highequal)
4453 in_p = 0, low = high = 0;
4454 else if (subset && lowequal)
4455 {
4456 low = range_successor (high1);
4457 high = high0;
4458 in_p = 1;
4459 if (low == 0)
4460 {
4461 /* We are in the weird situation where high0 > high1 but
4462 high1 has no successor. Punt. */
4463 return 0;
4464 }
4465 }
4466 else if (! subset || highequal)
4467 {
4468 low = low0;
4469 high = range_predecessor (low1);
4470 in_p = 1;
4471 if (high == 0)
4472 {
4473 /* low0 < low1 but low1 has no predecessor. Punt. */
4474 return 0;
4475 }
4476 }
4477 else
4478 return 0;
4479 }
4480
4481 else if (! in0_p && in1_p)
4482 {
4483 /* If they don't overlap, the result is the second range. If the second
4484 is a subset of the first, the result is false. Otherwise,
4485 the range starts just after the first range and ends at the
4486 end of the second. */
4487 if (no_overlap)
4488 in_p = 1, low = low1, high = high1;
4489 else if (subset || highequal)
4490 in_p = 0, low = high = 0;
4491 else
4492 {
4493 low = range_successor (high0);
4494 high = high1;
4495 in_p = 1;
4496 if (low == 0)
4497 {
4498 /* high1 > high0 but high0 has no successor. Punt. */
4499 return 0;
4500 }
4501 }
4502 }
4503
4504 else
4505 {
4506 /* The case where we are excluding both ranges. Here the complex case
4507 is if they don't overlap. In that case, the only time we have a
4508 range is if they are adjacent. If the second is a subset of the
4509 first, the result is the first. Otherwise, the range to exclude
4510 starts at the beginning of the first range and ends at the end of the
4511 second. */
4512 if (no_overlap)
4513 {
4514 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4515 range_successor (high0),
4516 1, low1, 0)))
4517 in_p = 0, low = low0, high = high1;
4518 else
4519 {
4520 /* Canonicalize - [min, x] into - [-, x]. */
4521 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4522 switch (TREE_CODE (TREE_TYPE (low0)))
4523 {
4524 case ENUMERAL_TYPE:
4525 if (TYPE_PRECISION (TREE_TYPE (low0))
4526 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4527 break;
4528 /* FALLTHROUGH */
4529 case INTEGER_TYPE:
4530 if (tree_int_cst_equal (low0,
4531 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4532 low0 = 0;
4533 break;
4534 case POINTER_TYPE:
4535 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4536 && integer_zerop (low0))
4537 low0 = 0;
4538 break;
4539 default:
4540 break;
4541 }
4542
4543 /* Canonicalize - [x, max] into - [x, -]. */
4544 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4545 switch (TREE_CODE (TREE_TYPE (high1)))
4546 {
4547 case ENUMERAL_TYPE:
4548 if (TYPE_PRECISION (TREE_TYPE (high1))
4549 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4550 break;
4551 /* FALLTHROUGH */
4552 case INTEGER_TYPE:
4553 if (tree_int_cst_equal (high1,
4554 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4555 high1 = 0;
4556 break;
4557 case POINTER_TYPE:
4558 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4559 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4560 high1, 1,
4561 build_int_cst (TREE_TYPE (high1), 1),
4562 1)))
4563 high1 = 0;
4564 break;
4565 default:
4566 break;
4567 }
4568
4569 /* The ranges might be also adjacent between the maximum and
4570 minimum values of the given type. For
4571 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4572 return + [x + 1, y - 1]. */
4573 if (low0 == 0 && high1 == 0)
4574 {
4575 low = range_successor (high0);
4576 high = range_predecessor (low1);
4577 if (low == 0 || high == 0)
4578 return 0;
4579
4580 in_p = 1;
4581 }
4582 else
4583 return 0;
4584 }
4585 }
4586 else if (subset)
4587 in_p = 0, low = low0, high = high0;
4588 else
4589 in_p = 0, low = low0, high = high1;
4590 }
4591
4592 *pin_p = in_p, *plow = low, *phigh = high;
4593 return 1;
4594 }
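
/* E.g. merging + [2, 5] with + [4, 9] (the "and both" case) yields
   + [4, 5], while merging + [2, 5] with - [4, 9] yields + [2, 3]. */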
4595 \f
4596
4597 /* Subroutine of fold, looking inside expressions of the form
4598 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4599 of the COND_EXPR. This function is also used to optimize
4600 A op B ? C : A, by reversing the comparison first.
4601
4602 Return a folded expression whose code is not a COND_EXPR
4603 anymore, or NULL_TREE if no folding opportunity is found. */
4604
4605 static tree
4606 fold_cond_expr_with_comparison (location_t loc, tree type,
4607 tree arg0, tree arg1, tree arg2)
4608 {
4609 enum tree_code comp_code = TREE_CODE (arg0);
4610 tree arg00 = TREE_OPERAND (arg0, 0);
4611 tree arg01 = TREE_OPERAND (arg0, 1);
4612 tree arg1_type = TREE_TYPE (arg1);
4613 tree tem;
4614
4615 STRIP_NOPS (arg1);
4616 STRIP_NOPS (arg2);
4617
4618 /* If we have A op 0 ? A : -A, consider applying the following
4619 transformations:
4620
4621 A == 0? A : -A same as -A
4622 A != 0? A : -A same as A
4623 A >= 0? A : -A same as abs (A)
4624 A > 0? A : -A same as abs (A)
4625 A <= 0? A : -A same as -abs (A)
4626 A < 0? A : -A same as -abs (A)
4627
4628 None of these transformations work for modes with signed
4629 zeros. If A is +/-0, the first two transformations will
4630 change the sign of the result (from +0 to -0, or vice
4631 versa). The last four will fix the sign of the result,
4632 even though the original expressions could be positive or
4633 negative, depending on the sign of A.
4634
4635 Note that all these transformations are correct if A is
4636 NaN, since the two alternatives (A and -A) are also NaNs. */
4637 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4638 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4639 ? real_zerop (arg01)
4640 : integer_zerop (arg01))
4641 && ((TREE_CODE (arg2) == NEGATE_EXPR
4642 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4643 /* In the case that A is of the form X-Y, '-A' (arg2) may
4644 have already been folded to Y-X, check for that. */
4645 || (TREE_CODE (arg1) == MINUS_EXPR
4646 && TREE_CODE (arg2) == MINUS_EXPR
4647 && operand_equal_p (TREE_OPERAND (arg1, 0),
4648 TREE_OPERAND (arg2, 1), 0)
4649 && operand_equal_p (TREE_OPERAND (arg1, 1),
4650 TREE_OPERAND (arg2, 0), 0))))
4651 switch (comp_code)
4652 {
4653 case EQ_EXPR:
4654 case UNEQ_EXPR:
4655 tem = fold_convert_loc (loc, arg1_type, arg1);
4656 return pedantic_non_lvalue_loc (loc,
4657 fold_convert_loc (loc, type,
4658 negate_expr (tem)));
4659 case NE_EXPR:
4660 case LTGT_EXPR:
4661 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4662 case UNGE_EXPR:
4663 case UNGT_EXPR:
4664 if (flag_trapping_math)
4665 break;
4666 /* Fall through. */
4667 case GE_EXPR:
4668 case GT_EXPR:
4669 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4670 arg1 = fold_convert_loc (loc, signed_type_for
4671 (TREE_TYPE (arg1)), arg1);
4672 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4673 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4674 case UNLE_EXPR:
4675 case UNLT_EXPR:
4676 if (flag_trapping_math)
4677 break;
4678 case LE_EXPR:
4679 case LT_EXPR:
4680 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4681 arg1 = fold_convert_loc (loc, signed_type_for
4682 (TREE_TYPE (arg1)), arg1);
4683 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4684 return negate_expr (fold_convert_loc (loc, type, tem));
4685 default:
4686 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4687 break;
4688 }
4689
4690 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4691 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4692 both transformations are correct when A is NaN: A != 0
4693 is then true, and A == 0 is false. */
4694
4695 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4696 && integer_zerop (arg01) && integer_zerop (arg2))
4697 {
4698 if (comp_code == NE_EXPR)
4699 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4700 else if (comp_code == EQ_EXPR)
4701 return build_zero_cst (type);
4702 }
4703
4704 /* Try some transformations of A op B ? A : B.
4705
4706 A == B? A : B same as B
4707 A != B? A : B same as A
4708 A >= B? A : B same as max (A, B)
4709 A > B? A : B same as max (B, A)
4710 A <= B? A : B same as min (A, B)
4711 A < B? A : B same as min (B, A)
4712
4713 As above, these transformations don't work in the presence
4714 of signed zeros. For example, if A and B are zeros of
4715 opposite sign, the first two transformations will change
4716 the sign of the result. In the last four, the original
4717 expressions give different results for (A=+0, B=-0) and
4718 (A=-0, B=+0), but the transformed expressions do not.
4719
4720 The first two transformations are correct if either A or B
4721 is a NaN. In the first transformation, the condition will
4722 be false, and B will indeed be chosen. In the case of the
4723 second transformation, the condition A != B will be true,
4724 and A will be chosen.
4725
4726 The conversions to max() and min() are not correct if B is
4727 a number and A is not. The conditions in the original
4728 expressions will be false, so all four give B. The min()
4729 and max() versions would give a NaN instead. */
4730 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4731 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4732 /* Avoid these transformations if the COND_EXPR may be used
4733 as an lvalue in the C++ front-end. PR c++/19199. */
4734 && (in_gimple_form
4735 || VECTOR_TYPE_P (type)
4736 || (strcmp (lang_hooks.name, "GNU C++") != 0
4737 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4738 || ! maybe_lvalue_p (arg1)
4739 || ! maybe_lvalue_p (arg2)))
4740 {
4741 tree comp_op0 = arg00;
4742 tree comp_op1 = arg01;
4743 tree comp_type = TREE_TYPE (comp_op0);
4744
4745 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4746 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4747 {
4748 comp_type = type;
4749 comp_op0 = arg1;
4750 comp_op1 = arg2;
4751 }
4752
4753 switch (comp_code)
4754 {
4755 case EQ_EXPR:
4756 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4757 case NE_EXPR:
4758 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4759 case LE_EXPR:
4760 case LT_EXPR:
4761 case UNLE_EXPR:
4762 case UNLT_EXPR:
4763 /* In C++ a ?: expression can be an lvalue, so put the
4764 operand which will be used if they are equal first
4765 so that we can convert this back to the
4766 corresponding COND_EXPR. */
4767 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4768 {
4769 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4770 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4771 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4772 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4773 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4774 comp_op1, comp_op0);
4775 return pedantic_non_lvalue_loc (loc,
4776 fold_convert_loc (loc, type, tem));
4777 }
4778 break;
4779 case GE_EXPR:
4780 case GT_EXPR:
4781 case UNGE_EXPR:
4782 case UNGT_EXPR:
4783 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4784 {
4785 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4786 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4787 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4788 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4789 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4790 comp_op1, comp_op0);
4791 return pedantic_non_lvalue_loc (loc,
4792 fold_convert_loc (loc, type, tem));
4793 }
4794 break;
4795 case UNEQ_EXPR:
4796 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4797 return pedantic_non_lvalue_loc (loc,
4798 fold_convert_loc (loc, type, arg2));
4799 break;
4800 case LTGT_EXPR:
4801 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4802 return pedantic_non_lvalue_loc (loc,
4803 fold_convert_loc (loc, type, arg1));
4804 break;
4805 default:
4806 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4807 break;
4808 }
4809 }
4810
4811 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4812 we might still be able to simplify this. For example,
4813 if C1 is one less or one more than C2, this might have started
4814 out as a MIN or MAX and been transformed by this function.
4815 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4816
4817 if (INTEGRAL_TYPE_P (type)
4818 && TREE_CODE (arg01) == INTEGER_CST
4819 && TREE_CODE (arg2) == INTEGER_CST)
4820 switch (comp_code)
4821 {
4822 case EQ_EXPR:
4823 if (TREE_CODE (arg1) == INTEGER_CST)
4824 break;
4825 /* We can replace A with C1 in this case. */
4826 arg1 = fold_convert_loc (loc, type, arg01);
4827 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4828
4829 case LT_EXPR:
4830 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4831 MIN_EXPR, to preserve the signedness of the comparison. */
4832 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4833 OEP_ONLY_CONST)
4834 && operand_equal_p (arg01,
4835 const_binop (PLUS_EXPR, arg2,
4836 build_int_cst (type, 1)),
4837 OEP_ONLY_CONST))
4838 {
4839 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4840 fold_convert_loc (loc, TREE_TYPE (arg00),
4841 arg2));
4842 return pedantic_non_lvalue_loc (loc,
4843 fold_convert_loc (loc, type, tem));
4844 }
4845 break;
4846
4847 case LE_EXPR:
4848 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4849 as above. */
4850 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4851 OEP_ONLY_CONST)
4852 && operand_equal_p (arg01,
4853 const_binop (MINUS_EXPR, arg2,
4854 build_int_cst (type, 1)),
4855 OEP_ONLY_CONST))
4856 {
4857 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4858 fold_convert_loc (loc, TREE_TYPE (arg00),
4859 arg2));
4860 return pedantic_non_lvalue_loc (loc,
4861 fold_convert_loc (loc, type, tem));
4862 }
4863 break;
4864
4865 case GT_EXPR:
4866 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4867 MAX_EXPR, to preserve the signedness of the comparison. */
4868 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4869 OEP_ONLY_CONST)
4870 && operand_equal_p (arg01,
4871 const_binop (MINUS_EXPR, arg2,
4872 build_int_cst (type, 1)),
4873 OEP_ONLY_CONST))
4874 {
4875 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4876 fold_convert_loc (loc, TREE_TYPE (arg00),
4877 arg2));
4878 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4879 }
4880 break;
4881
4882 case GE_EXPR:
4883 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4884 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4885 OEP_ONLY_CONST)
4886 && operand_equal_p (arg01,
4887 const_binop (PLUS_EXPR, arg2,
4888 build_int_cst (type, 1)),
4889 OEP_ONLY_CONST))
4890 {
4891 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4892 fold_convert_loc (loc, TREE_TYPE (arg00),
4893 arg2));
4894 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4895 }
4896 break;
4897 case NE_EXPR:
4898 break;
4899 default:
4900 gcc_unreachable ();
4901 }
4902
4903 return NULL_TREE;
4904 }
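
/* E.g. for signed integral A, "A >= 0 ? A : -A" folds to
   ABS_EXPR <A>, and "A < 5 ? A : 4" matches the LT_EXPR case above
   (C1 == C2 + 1) and folds to MIN_EXPR <A, 4>. */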
4905
4906
4907 \f
4908 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4909 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4910 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4911 false) >= 2)
4912 #endif
4913
4914 /* EXP is some logical combination of boolean tests. See if we can
4915 merge it into some range test. Return the new tree if so. */
4916
4917 static tree
4918 fold_range_test (location_t loc, enum tree_code code, tree type,
4919 tree op0, tree op1)
4920 {
4921 int or_op = (code == TRUTH_ORIF_EXPR
4922 || code == TRUTH_OR_EXPR);
4923 int in0_p, in1_p, in_p;
4924 tree low0, low1, low, high0, high1, high;
4925 bool strict_overflow_p = false;
4926 tree tem, lhs, rhs;
4927 const char * const warnmsg = G_("assuming signed overflow does not occur "
4928 "when simplifying range test");
4929
4930 if (!INTEGRAL_TYPE_P (type))
4931 return 0;
4932
4933 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4934 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4935
4936 /* If this is an OR operation, invert both sides; we will invert
4937 again at the end. */
4938 if (or_op)
4939 in0_p = ! in0_p, in1_p = ! in1_p;
4940
4941 /* If both expressions are the same, if we can merge the ranges, and we
4942 can build the range test, return it or its inversion. If one of the
4943 ranges is always true or always false, consider it to be the same
4944 expression as the other. */
4945 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4946 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4947 in1_p, low1, high1)
4948 && 0 != (tem = (build_range_check (loc, type,
4949 lhs != 0 ? lhs
4950 : rhs != 0 ? rhs : integer_zero_node,
4951 in_p, low, high))))
4952 {
4953 if (strict_overflow_p)
4954 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4955 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4956 }
4957
4958 /* On machines where the branch cost is expensive, if this is a
4959 short-circuited branch and the underlying object on both sides
4960 is the same, make a non-short-circuit operation. */
4961 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4962 && lhs != 0 && rhs != 0
4963 && (code == TRUTH_ANDIF_EXPR
4964 || code == TRUTH_ORIF_EXPR)
4965 && operand_equal_p (lhs, rhs, 0))
4966 {
4967 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4968 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4969 which cases we can't do this. */
4970 if (simple_operand_p (lhs))
4971 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4972 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4973 type, op0, op1);
4974
4975 else if (!lang_hooks.decls.global_bindings_p ()
4976 && !CONTAINS_PLACEHOLDER_P (lhs))
4977 {
4978 tree common = save_expr (lhs);
4979
4980 if (0 != (lhs = build_range_check (loc, type, common,
4981 or_op ? ! in0_p : in0_p,
4982 low0, high0))
4983 && (0 != (rhs = build_range_check (loc, type, common,
4984 or_op ? ! in1_p : in1_p,
4985 low1, high1))))
4986 {
4987 if (strict_overflow_p)
4988 fold_overflow_warning (warnmsg,
4989 WARN_STRICT_OVERFLOW_COMPARISON);
4990 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4991 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4992 type, lhs, rhs);
4993 }
4994 }
4995 }
4996
4997 return 0;
4998 }
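
/* E.g. "ch >= '0' && ch <= '9'" produces two ranges over the same
   operand; they merge to + ['0', '9'] and the built check is
   roughly (unsigned) (ch - '0') <= 9. */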
4999 \f
5000 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5001 bit value. Arrange things so the extra bits will be set to zero if and
5002 only if C is sign-extended to its full width. If MASK is nonzero,
5003 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5004
5005 static tree
5006 unextend (tree c, int p, int unsignedp, tree mask)
5007 {
5008 tree type = TREE_TYPE (c);
5009 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5010 tree temp;
5011
5012 if (p == modesize || unsignedp)
5013 return c;
5014
5015 /* We work by getting just the sign bit into the low-order bit, then
5016 into the high-order bit, then sign-extend. We then XOR that value
5017 with C. */
5018 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5019
5020 /* We must use a signed type in order to get an arithmetic right shift.
5021 However, we must also avoid introducing accidental overflows, so that
5022 a subsequent call to integer_zerop will work. Hence we must
5023 do the type conversion here. At this point, the constant is either
5024 zero or one, and the conversion to a signed type can never overflow.
5025 We could get an overflow if this conversion is done anywhere else. */
5026 if (TYPE_UNSIGNED (type))
5027 temp = fold_convert (signed_type_for (type), temp);
5028
5029 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5030 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5031 if (mask != 0)
5032 temp = const_binop (BIT_AND_EXPR, temp,
5033 fold_convert (TREE_TYPE (c), mask));
5034 /* If necessary, convert the type back to match the type of C. */
5035 if (TYPE_UNSIGNED (type))
5036 temp = fold_convert (type, temp);
5037
5038 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5039 }
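
/* For instance, with P == 4 in an 8-bit mode: the sign-extended
   constant 0xfa maps to 0x0a (extra bits zero), while the
   zero-extended 0x0a maps to 0xfa (extra bits set), exactly the
   if-and-only-if condition described above. */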
5040 \f
5041 /* For an expression that has the form
5042 (A && B) || ~B
5043 or
5044 (A || B) && ~B,
5045 we can drop one of the inner expressions and simplify to
5046 A || ~B
5047 or
5048 A && ~B
5049 LOC is the location of the resulting expression. OP is the inner
5050 logical operation; the left-hand side in the examples above, while CMPOP
5051 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5052 removing a condition that guards another, as in
5053 (A != NULL && A->...) || A == NULL
5054 which we must not transform. If RHS_ONLY is true, only eliminate the
5055 right-most operand of the inner logical operation. */
5056
5057 static tree
5058 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5059 bool rhs_only)
5060 {
5061 tree type = TREE_TYPE (cmpop);
5062 enum tree_code code = TREE_CODE (cmpop);
5063 enum tree_code truthop_code = TREE_CODE (op);
5064 tree lhs = TREE_OPERAND (op, 0);
5065 tree rhs = TREE_OPERAND (op, 1);
5066 tree orig_lhs = lhs, orig_rhs = rhs;
5067 enum tree_code rhs_code = TREE_CODE (rhs);
5068 enum tree_code lhs_code = TREE_CODE (lhs);
5069 enum tree_code inv_code;
5070
5071 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5072 return NULL_TREE;
5073
5074 if (TREE_CODE_CLASS (code) != tcc_comparison)
5075 return NULL_TREE;
5076
5077 if (rhs_code == truthop_code)
5078 {
5079 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5080 if (newrhs != NULL_TREE)
5081 {
5082 rhs = newrhs;
5083 rhs_code = TREE_CODE (rhs);
5084 }
5085 }
5086 if (lhs_code == truthop_code && !rhs_only)
5087 {
5088 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5089 if (newlhs != NULL_TREE)
5090 {
5091 lhs = newlhs;
5092 lhs_code = TREE_CODE (lhs);
5093 }
5094 }
5095
5096 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5097 if (inv_code == rhs_code
5098 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5099 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5100 return lhs;
5101 if (!rhs_only && inv_code == lhs_code
5102 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5103 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5104 return rhs;
5105 if (rhs != orig_rhs || lhs != orig_lhs)
5106 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5107 lhs, rhs);
5108 return NULL_TREE;
5109 }
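
/* E.g. with OP == "x < 0 || y != 0" and CMPOP == "x >= 0", the
   inverse of CMPOP matches the left arm, so "y != 0" is returned
   and the caller can build "y != 0 && x >= 0". */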
5110
5111 /* Find ways of folding logical expressions of LHS and RHS:
5112 Try to merge two comparisons to the same innermost item.
5113 Look for range tests like "ch >= '0' && ch <= '9'".
5114 Look for combinations of simple terms on machines with expensive branches
5115 and evaluate the RHS unconditionally.
5116
5117 For example, if we have p->a == 2 && p->b == 4 and we can make an
5118 object large enough to span both A and B, we can do this with a comparison
5119 against the object ANDed with a mask.
5120
5121 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5122 operations to do this with one comparison.
5123
5124 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5125 function and the one above.
5126
5127 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5128 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5129
5130 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5131 two operands.
5132
5133 We return the simplified tree or 0 if no optimization is possible. */
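
/* For instance, given

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test "p->a == 2 && p->b == 3" can be done, on a little-endian
   target, with a single load of the containing byte masked and
   compared against the combined constant 0x32. */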
5134
5135 static tree
5136 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5137 tree lhs, tree rhs)
5138 {
5139 /* If this is the "or" of two comparisons, we can do something if
5140 the comparisons are NE_EXPR. If this is the "and", we can do something
5141 if the comparisons are EQ_EXPR. I.e.,
5142 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5143
5144 WANTED_CODE is this operation code. For single bit fields, we can
5145 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5146 comparison for one-bit fields. */
5147
5148 enum tree_code wanted_code;
5149 enum tree_code lcode, rcode;
5150 tree ll_arg, lr_arg, rl_arg, rr_arg;
5151 tree ll_inner, lr_inner, rl_inner, rr_inner;
5152 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5153 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5154 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5155 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5156 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5157 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5158 enum machine_mode lnmode, rnmode;
5159 tree ll_mask, lr_mask, rl_mask, rr_mask;
5160 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5161 tree l_const, r_const;
5162 tree lntype, rntype, result;
5163 HOST_WIDE_INT first_bit, end_bit;
5164 int volatilep;
5165
5166 /* Start by getting the comparison codes. Fail if anything is volatile.
5167 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5168 it were surrounded with a NE_EXPR. */
5169
5170 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5171 return 0;
5172
5173 lcode = TREE_CODE (lhs);
5174 rcode = TREE_CODE (rhs);
5175
5176 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5177 {
5178 lhs = build2 (NE_EXPR, truth_type, lhs,
5179 build_int_cst (TREE_TYPE (lhs), 0));
5180 lcode = NE_EXPR;
5181 }
5182
5183 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5184 {
5185 rhs = build2 (NE_EXPR, truth_type, rhs,
5186 build_int_cst (TREE_TYPE (rhs), 0));
5187 rcode = NE_EXPR;
5188 }
5189
5190 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5191 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5192 return 0;
5193
5194 ll_arg = TREE_OPERAND (lhs, 0);
5195 lr_arg = TREE_OPERAND (lhs, 1);
5196 rl_arg = TREE_OPERAND (rhs, 0);
5197 rr_arg = TREE_OPERAND (rhs, 1);
5198
5199 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5200 if (simple_operand_p (ll_arg)
5201 && simple_operand_p (lr_arg))
5202 {
5203 if (operand_equal_p (ll_arg, rl_arg, 0)
5204 && operand_equal_p (lr_arg, rr_arg, 0))
5205 {
5206 result = combine_comparisons (loc, code, lcode, rcode,
5207 truth_type, ll_arg, lr_arg);
5208 if (result)
5209 return result;
5210 }
5211 else if (operand_equal_p (ll_arg, rr_arg, 0)
5212 && operand_equal_p (lr_arg, rl_arg, 0))
5213 {
5214 result = combine_comparisons (loc, code, lcode,
5215 swap_tree_comparison (rcode),
5216 truth_type, ll_arg, lr_arg);
5217 if (result)
5218 return result;
5219 }
5220 }
5221
5222 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5223 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5224
5225 /* If the RHS can be evaluated unconditionally and its operands are
5226 simple, it wins to evaluate the RHS unconditionally on machines
5227 with expensive branches. In this case, this isn't a comparison
5228 that can be merged. */
5229
5230 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5231 false) >= 2
5232 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5233 && simple_operand_p (rl_arg)
5234 && simple_operand_p (rr_arg))
5235 {
5236 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5237 if (code == TRUTH_OR_EXPR
5238 && lcode == NE_EXPR && integer_zerop (lr_arg)
5239 && rcode == NE_EXPR && integer_zerop (rr_arg)
5240 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5241 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5242 return build2_loc (loc, NE_EXPR, truth_type,
5243 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5244 ll_arg, rl_arg),
5245 build_int_cst (TREE_TYPE (ll_arg), 0));
5246
5247 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5248 if (code == TRUTH_AND_EXPR
5249 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5250 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5251 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5252 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5253 return build2_loc (loc, EQ_EXPR, truth_type,
5254 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5255 ll_arg, rl_arg),
5256 build_int_cst (TREE_TYPE (ll_arg), 0));
5257 }
5258
5259 /* See if the comparisons can be merged. Then get all the parameters for
5260 each side. */
5261
5262 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5263 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5264 return 0;
5265
5266 volatilep = 0;
5267 ll_inner = decode_field_reference (loc, ll_arg,
5268 &ll_bitsize, &ll_bitpos, &ll_mode,
5269 &ll_unsignedp, &volatilep, &ll_mask,
5270 &ll_and_mask);
5271 lr_inner = decode_field_reference (loc, lr_arg,
5272 &lr_bitsize, &lr_bitpos, &lr_mode,
5273 &lr_unsignedp, &volatilep, &lr_mask,
5274 &lr_and_mask);
5275 rl_inner = decode_field_reference (loc, rl_arg,
5276 &rl_bitsize, &rl_bitpos, &rl_mode,
5277 &rl_unsignedp, &volatilep, &rl_mask,
5278 &rl_and_mask);
5279 rr_inner = decode_field_reference (loc, rr_arg,
5280 &rr_bitsize, &rr_bitpos, &rr_mode,
5281 &rr_unsignedp, &volatilep, &rr_mask,
5282 &rr_and_mask);
5283
5284 /* The inner operation on the lhs of each
5285 comparison must be the same if we are to be able to do anything.
5286 Then see if we have constants. If not, the same must be true for
5287 the rhs's. */
5288 if (volatilep || ll_inner == 0 || rl_inner == 0
5289 || ! operand_equal_p (ll_inner, rl_inner, 0))
5290 return 0;
5291
5292 if (TREE_CODE (lr_arg) == INTEGER_CST
5293 && TREE_CODE (rr_arg) == INTEGER_CST)
5294 l_const = lr_arg, r_const = rr_arg;
5295 else if (lr_inner == 0 || rr_inner == 0
5296 || ! operand_equal_p (lr_inner, rr_inner, 0))
5297 return 0;
5298 else
5299 l_const = r_const = 0;
5300
5301 /* If either comparison code is not correct for our logical operation,
5302 fail. However, we can convert a one-bit comparison against zero into
5303 the opposite comparison against that bit being set in the field. */
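   /* For instance (an illustrative sketch; the struct fields are
      hypothetical): in `s.a == 2 && s.b', where s.b is a one-bit field,
      the rhs becomes `s.b != 0', which has the wrong code for
      TRUTH_AND_EXPR; it is rewritten as the equality test of the field
      against its single mask bit, so both halves use EQ_EXPR.  */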
5304
5305 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5306 if (lcode != wanted_code)
5307 {
5308 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5309 {
5310 /* Make the left operand unsigned, since we are only interested
5311 in the value of one bit. Otherwise we are doing the wrong
5312 thing below. */
5313 ll_unsignedp = 1;
5314 l_const = ll_mask;
5315 }
5316 else
5317 return 0;
5318 }
5319
5320 /* This is analogous to the code for l_const above. */
5321 if (rcode != wanted_code)
5322 {
5323 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5324 {
5325 rl_unsignedp = 1;
5326 r_const = rl_mask;
5327 }
5328 else
5329 return 0;
5330 }
5331
5332 /* See if we can find a mode that contains both fields being compared on
5333 the left. If we can't, fail. Otherwise, update all constants and masks
5334 to be relative to a field of that size. */
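   /* For instance (an illustrative sketch; struct and constants are
      hypothetical): given `struct { unsigned a : 4; unsigned b : 4; } s;',
      the test `s.a == 3 && s.b == 5' can be done as one 8-bit load of the
      byte covering both fields, compared against the merged constant
      (5 << 4) | 3 on a little-endian target.  */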
5335 first_bit = MIN (ll_bitpos, rl_bitpos);
5336 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5337 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5338 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5339 volatilep);
5340 if (lnmode == VOIDmode)
5341 return 0;
5342
5343 lnbitsize = GET_MODE_BITSIZE (lnmode);
5344 lnbitpos = first_bit & ~ (lnbitsize - 1);
5345 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5346 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5347
5348 if (BYTES_BIG_ENDIAN)
5349 {
5350 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5351 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5352 }
5353
5354 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5355 size_int (xll_bitpos));
5356 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5357 size_int (xrl_bitpos));
5358
5359 if (l_const)
5360 {
5361 l_const = fold_convert_loc (loc, lntype, l_const);
5362 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5363 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5364 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5365 fold_build1_loc (loc, BIT_NOT_EXPR,
5366 lntype, ll_mask))))
5367 {
5368 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5369
5370 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5371 }
5372 }
5373 if (r_const)
5374 {
5375 r_const = fold_convert_loc (loc, lntype, r_const);
5376 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5377 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5378 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5379 fold_build1_loc (loc, BIT_NOT_EXPR,
5380 lntype, rl_mask))))
5381 {
5382 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5383
5384 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5385 }
5386 }
5387
5388 /* If the right sides are not constant, do the same for them. Also,
5389 disallow this optimization if a size or signedness mismatch occurs
5390 between the left and right sides. */
5391 if (l_const == 0)
5392 {
5393 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5394 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5395 /* Make sure the two fields on the right
5396 correspond to the left without being swapped. */
5397 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5398 return 0;
5399
5400 first_bit = MIN (lr_bitpos, rr_bitpos);
5401 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5402 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5403 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5404 volatilep);
5405 if (rnmode == VOIDmode)
5406 return 0;
5407
5408 rnbitsize = GET_MODE_BITSIZE (rnmode);
5409 rnbitpos = first_bit & ~ (rnbitsize - 1);
5410 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5411 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5412
5413 if (BYTES_BIG_ENDIAN)
5414 {
5415 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5416 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5417 }
5418
5419 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5420 rntype, lr_mask),
5421 size_int (xlr_bitpos));
5422 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5423 rntype, rr_mask),
5424 size_int (xrr_bitpos));
5425
5426 /* Make a mask that corresponds to both fields being compared.
5427 Do this for both items being compared. If the operands are the
5428 same size and the bits being compared are in the same position
5429 then we can do this by masking both and comparing the masked
5430 results. */
5431 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5432 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5433 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5434 {
5435 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5436 ll_unsignedp || rl_unsignedp);
5437 if (! all_ones_mask_p (ll_mask, lnbitsize))
5438 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5439
5440 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5441 lr_unsignedp || rr_unsignedp);
5442 if (! all_ones_mask_p (lr_mask, rnbitsize))
5443 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5444
5445 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5446 }
5447
5448 /* There is still another way we can do something: If both pairs of
5449 fields being compared are adjacent, we may be able to make a wider
5450 field containing them both.
5451
5452 Note that we still must mask the lhs/rhs expressions. Furthermore,
5453 the mask must be shifted to account for the shift done by
5454 make_bit_field_ref. */
5455 if ((ll_bitsize + ll_bitpos == rl_bitpos
5456 && lr_bitsize + lr_bitpos == rr_bitpos)
5457 || (ll_bitpos == rl_bitpos + rl_bitsize
5458 && lr_bitpos == rr_bitpos + rr_bitsize))
5459 {
5460 tree type;
5461
5462 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5463 ll_bitsize + rl_bitsize,
5464 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5465 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5466 lr_bitsize + rr_bitsize,
5467 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5468
5469 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5470 size_int (MIN (xll_bitpos, xrl_bitpos)));
5471 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5472 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5473
5474 /* Convert to the smaller type before masking out unwanted bits. */
5475 type = lntype;
5476 if (lntype != rntype)
5477 {
5478 if (lnbitsize > rnbitsize)
5479 {
5480 lhs = fold_convert_loc (loc, rntype, lhs);
5481 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5482 type = rntype;
5483 }
5484 else if (lnbitsize < rnbitsize)
5485 {
5486 rhs = fold_convert_loc (loc, lntype, rhs);
5487 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5488 type = lntype;
5489 }
5490 }
5491
5492 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5493 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5494
5495 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5496 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5497
5498 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5499 }
5500
5501 return 0;
5502 }
5503
5504 /* Handle the case of comparisons with constants. If there is something in
5505 common between the masks, those bits of the constants must be the same.
5506 If not, the result is a constant. Test for this to avoid generating
5507 incorrect code below. */
5508 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5509 if (! integer_zerop (result)
5510 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5511 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5512 {
5513 if (wanted_code == NE_EXPR)
5514 {
5515 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5516 return constant_boolean_node (true, truth_type);
5517 }
5518 else
5519 {
5520 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5521 return constant_boolean_node (false, truth_type);
5522 }
5523 }
5524
5525 /* Construct the expression we will return. First get the component
5526 reference we will make. Unless the mask is all ones the width of
5527 that field, perform the mask operation. Then compare with the
5528 merged constant. */
5529 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5530 ll_unsignedp || rl_unsignedp);
5531
5532 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5533 if (! all_ones_mask_p (ll_mask, lnbitsize))
5534 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5535
5536 return build2_loc (loc, wanted_code, truth_type, result,
5537 const_binop (BIT_IOR_EXPR, l_const, r_const));
5538 }
5539 \f
5540 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5541 constant. */
5542
5543 static tree
5544 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5545 tree op0, tree op1)
5546 {
5547 tree arg0 = op0;
5548 enum tree_code op_code;
5549 tree comp_const;
5550 tree minmax_const;
5551 int consts_equal, consts_lt;
5552 tree inner;
5553
5554 STRIP_SIGN_NOPS (arg0);
5555
5556 op_code = TREE_CODE (arg0);
5557 minmax_const = TREE_OPERAND (arg0, 1);
5558 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5559 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5560 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5561 inner = TREE_OPERAND (arg0, 0);
5562
5563 /* If something does not permit us to optimize, return NULL_TREE. */
5564 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5565 || TREE_CODE (comp_const) != INTEGER_CST
5566 || TREE_OVERFLOW (comp_const)
5567 || TREE_CODE (minmax_const) != INTEGER_CST
5568 || TREE_OVERFLOW (minmax_const))
5569 return NULL_TREE;
5570
5571 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5572 and GT_EXPR, doing the rest with recursive calls using logical
5573 simplifications. */
5574 switch (code)
5575 {
5576 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5577 {
5578 tree tem
5579 = optimize_minmax_comparison (loc,
5580 invert_tree_comparison (code, false),
5581 type, op0, op1);
5582 if (tem)
5583 return invert_truthvalue_loc (loc, tem);
5584 return NULL_TREE;
5585 }
5586
5587 case GE_EXPR:
5588 return
5589 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5590 optimize_minmax_comparison
5591 (loc, EQ_EXPR, type, arg0, comp_const),
5592 optimize_minmax_comparison
5593 (loc, GT_EXPR, type, arg0, comp_const));
5594
5595 case EQ_EXPR:
5596 if (op_code == MAX_EXPR && consts_equal)
5597 /* MAX (X, 0) == 0 -> X <= 0 */
5598 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5599
5600 else if (op_code == MAX_EXPR && consts_lt)
5601 /* MAX (X, 0) == 5 -> X == 5 */
5602 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5603
5604 else if (op_code == MAX_EXPR)
5605 /* MAX (X, 0) == -1 -> false */
5606 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5607
5608 else if (consts_equal)
5609 /* MIN (X, 0) == 0 -> X >= 0 */
5610 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5611
5612 else if (consts_lt)
5613 /* MIN (X, 0) == 5 -> false */
5614 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5615
5616 else
5617 /* MIN (X, 0) == -1 -> X == -1 */
5618 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5619
5620 case GT_EXPR:
5621 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5622 /* MAX (X, 0) > 0 -> X > 0
5623 MAX (X, 0) > 5 -> X > 5 */
5624 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5625
5626 else if (op_code == MAX_EXPR)
5627 /* MAX (X, 0) > -1 -> true */
5628 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5629
5630 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5631 /* MIN (X, 0) > 0 -> false
5632 MIN (X, 0) > 5 -> false */
5633 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5634
5635 else
5636 /* MIN (X, 0) > -1 -> X > -1 */
5637 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5638
5639 default:
5640 return NULL_TREE;
5641 }
5642 }
5643 \f
5644 /* T is an integer expression that is being multiplied, divided, or taken a
5645 modulus (CODE says which and what kind of divide or modulus) by a
5646 constant C. See if we can eliminate that operation by folding it with
5647 other operations already in T. WIDE_TYPE, if non-null, is a type that
5648 should be used for the computation if wider than our type.
5649
5650 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5651 (X * 2) + (Y * 4). We must, however, be assured that either the original
5652 expression would not overflow or that overflow is undefined for the type
5653 in the language in question.
5654
5655 If we return a non-null expression, it is an equivalent form of the
5656 original computation, but need not be in the original type.
5657
5658 We set *STRICT_OVERFLOW_P to true if the return values depends on
5659 signed overflow being undefined. Otherwise we do not change
5660 *STRICT_OVERFLOW_P. */
5661
5662 static tree
5663 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5664 bool *strict_overflow_p)
5665 {
5666 /* To avoid exponential search depth, refuse to allow recursion past
5667 three levels. Beyond that (1) it's highly unlikely that we'll find
5668 something interesting and (2) we've probably processed it before
5669 when we built the inner expression. */
5670
5671 static int depth;
5672 tree ret;
5673
5674 if (depth > 3)
5675 return NULL;
5676
5677 depth++;
5678 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5679 depth--;
5680
5681 return ret;
5682 }
5683
5684 static tree
5685 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5686 bool *strict_overflow_p)
5687 {
5688 tree type = TREE_TYPE (t);
5689 enum tree_code tcode = TREE_CODE (t);
5690 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5691 > GET_MODE_SIZE (TYPE_MODE (type)))
5692 ? wide_type : type);
5693 tree t1, t2;
5694 int same_p = tcode == code;
5695 tree op0 = NULL_TREE, op1 = NULL_TREE;
5696 bool sub_strict_overflow_p;
5697
5698 /* Don't deal with constants of zero here; they confuse the code below. */
5699 if (integer_zerop (c))
5700 return NULL_TREE;
5701
5702 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5703 op0 = TREE_OPERAND (t, 0);
5704
5705 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5706 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5707
5708 /* Note that we need not handle conditional operations here since fold
5709 already handles those cases. So just do arithmetic here. */
5710 switch (tcode)
5711 {
5712 case INTEGER_CST:
5713 /* For a constant, we can always simplify if we are a multiply
5714 or (for divide and modulus) if it is a multiple of our constant. */
5715 if (code == MULT_EXPR
5716 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5717 return const_binop (code, fold_convert (ctype, t),
5718 fold_convert (ctype, c));
5719 break;
5720
5721 CASE_CONVERT: case NON_LVALUE_EXPR:
5722 /* If op0 is an expression ... */
5723 if ((COMPARISON_CLASS_P (op0)
5724 || UNARY_CLASS_P (op0)
5725 || BINARY_CLASS_P (op0)
5726 || VL_EXP_CLASS_P (op0)
5727 || EXPRESSION_CLASS_P (op0))
5728 /* ... and has wrapping overflow, and its type is smaller
5729 than ctype, then we cannot pass through as widening. */
5730 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5731 && (TYPE_PRECISION (ctype)
5732 > TYPE_PRECISION (TREE_TYPE (op0))))
5733 /* ... or this is a truncation (t is narrower than op0),
5734 then we cannot pass through this narrowing. */
5735 || (TYPE_PRECISION (type)
5736 < TYPE_PRECISION (TREE_TYPE (op0)))
5737 /* ... or signedness changes for division or modulus,
5738 then we cannot pass through this conversion. */
5739 || (code != MULT_EXPR
5740 && (TYPE_UNSIGNED (ctype)
5741 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5742 /* ... or has undefined overflow while the converted to
5743 type has not, we cannot do the operation in the inner type
5744 as that would introduce undefined overflow. */
5745 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5746 && !TYPE_OVERFLOW_UNDEFINED (type))))
5747 break;
5748
5749 /* Pass the constant down and see if we can make a simplification. If
5750 we can, replace this expression with the inner simplification for
5751 possible later conversion to our or some other type. */
5752 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5753 && TREE_CODE (t2) == INTEGER_CST
5754 && !TREE_OVERFLOW (t2)
5755 && (0 != (t1 = extract_muldiv (op0, t2, code,
5756 code == MULT_EXPR
5757 ? ctype : NULL_TREE,
5758 strict_overflow_p))))
5759 return t1;
5760 break;
5761
5762 case ABS_EXPR:
5763 /* If widening the type changes it from signed to unsigned, then we
5764 must avoid building ABS_EXPR itself as unsigned. */
5765 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5766 {
5767 tree cstype = (*signed_type_for) (ctype);
5768 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5769 != 0)
5770 {
5771 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5772 return fold_convert (ctype, t1);
5773 }
5774 break;
5775 }
5776 /* If the constant is negative, we cannot simplify this. */
5777 if (tree_int_cst_sgn (c) == -1)
5778 break;
5779 /* FALLTHROUGH */
5780 case NEGATE_EXPR:
5781 /* For division and modulus, type can't be unsigned, as e.g.
5782 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5783 For signed types, even with wrapping overflow, this is fine. */
5784 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5785 break;
5786 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5787 != 0)
5788 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5789 break;
5790
5791 case MIN_EXPR: case MAX_EXPR:
5792 /* If widening the type changes the signedness, then we can't perform
5793 this optimization as that changes the result. */
5794 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5795 break;
5796
5797 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5798 sub_strict_overflow_p = false;
5799 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5800 &sub_strict_overflow_p)) != 0
5801 && (t2 = extract_muldiv (op1, c, code, wide_type,
5802 &sub_strict_overflow_p)) != 0)
5803 {
5804 if (tree_int_cst_sgn (c) < 0)
5805 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5806 if (sub_strict_overflow_p)
5807 *strict_overflow_p = true;
5808 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5809 fold_convert (ctype, t2));
5810 }
5811 break;
5812
5813 case LSHIFT_EXPR: case RSHIFT_EXPR:
5814 /* If the second operand is constant, this is a multiplication
5815 or floor division by a power of two, so we can treat it that
5816 way unless the multiplier or divisor overflows. Signed
5817 left-shift overflow is implementation-defined rather than
5818 undefined in C90, so do not convert signed left shift into
5819 multiplication. */
5820 if (TREE_CODE (op1) == INTEGER_CST
5821 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5822 /* const_binop may not detect overflow correctly,
5823 so check for it explicitly here. */
5824 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5825 && 0 != (t1 = fold_convert (ctype,
5826 const_binop (LSHIFT_EXPR,
5827 size_one_node,
5828 op1)))
5829 && !TREE_OVERFLOW (t1))
5830 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5831 ? MULT_EXPR : FLOOR_DIV_EXPR,
5832 ctype,
5833 fold_convert (ctype, op0),
5834 t1),
5835 c, code, wide_type, strict_overflow_p);
5836 break;
5837
5838 case PLUS_EXPR: case MINUS_EXPR:
5839 /* See if we can eliminate the operation on both sides. If we can, we
5840 can return a new PLUS or MINUS. If we can't, the only remaining
5841 cases where we can do anything are if the second operand is a
5842 constant. */
5843 sub_strict_overflow_p = false;
5844 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5845 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5846 if (t1 != 0 && t2 != 0
5847 && (code == MULT_EXPR
5848 /* If not multiplication, we can only do this if both operands
5849 are divisible by c. */
5850 || (multiple_of_p (ctype, op0, c)
5851 && multiple_of_p (ctype, op1, c))))
5852 {
5853 if (sub_strict_overflow_p)
5854 *strict_overflow_p = true;
5855 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5856 fold_convert (ctype, t2));
5857 }
5858
5859 /* If this was a subtraction, negate OP1 and set it to be an addition.
5860 This simplifies the logic below. */
5861 if (tcode == MINUS_EXPR)
5862 {
5863 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5864 /* If OP1 was not easily negatable, the constant may be OP0. */
5865 if (TREE_CODE (op0) == INTEGER_CST)
5866 {
5867 tree tem = op0;
5868 op0 = op1;
5869 op1 = tem;
5870 tem = t1;
5871 t1 = t2;
5872 t2 = tem;
5873 }
5874 }
5875
5876 if (TREE_CODE (op1) != INTEGER_CST)
5877 break;
5878
5879 /* If either OP1 or C are negative, this optimization is not safe for
5880 some of the division and remainder types while for others we need
5881 to change the code. */
5882 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5883 {
5884 if (code == CEIL_DIV_EXPR)
5885 code = FLOOR_DIV_EXPR;
5886 else if (code == FLOOR_DIV_EXPR)
5887 code = CEIL_DIV_EXPR;
5888 else if (code != MULT_EXPR
5889 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5890 break;
5891 }
5892
5893 /* If it's a multiply or a division/modulus operation of a multiple
5894 of our constant, do the operation and verify it doesn't overflow. */
5895 if (code == MULT_EXPR
5896 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5897 {
5898 op1 = const_binop (code, fold_convert (ctype, op1),
5899 fold_convert (ctype, c));
5900 /* We allow the constant to overflow with wrapping semantics. */
5901 if (op1 == 0
5902 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5903 break;
5904 }
5905 else
5906 break;
5907
5908 /* If we have an unsigned type, we cannot widen the operation since it
5909 will change the result if the original computation overflowed. */
5910 if (TYPE_UNSIGNED (ctype) && ctype != type)
5911 break;
5912
5913 /* If we were able to eliminate our operation from the first side,
5914 apply our operation to the second side and reform the PLUS. */
5915 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5916 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5917
5918 /* The last case is if we are a multiply. In that case, we can
5919 apply the distributive law to commute the multiply and addition
5920 if the multiplication of the constants doesn't overflow
5921 and overflow is defined. With undefined overflow
5922 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5923 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5924 return fold_build2 (tcode, ctype,
5925 fold_build2 (code, ctype,
5926 fold_convert (ctype, op0),
5927 fold_convert (ctype, c)),
5928 op1);
5929
5930 break;
5931
5932 case MULT_EXPR:
5933 /* We have a special case here if we are doing something like
5934 (C * 8) % 4 since we know that's zero. */
5935 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5936 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5937 /* If the multiplication can overflow we cannot optimize this. */
5938 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5939 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5940 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5941 {
5942 *strict_overflow_p = true;
5943 return omit_one_operand (type, integer_zero_node, op0);
5944 }
5945
5946 /* ... fall through ... */
5947
5948 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5949 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5950 /* If we can extract our operation from the LHS, do so and return a
5951 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5952 do something only if the second operand is a constant. */
5953 if (same_p
5954 && (t1 = extract_muldiv (op0, c, code, wide_type,
5955 strict_overflow_p)) != 0)
5956 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5957 fold_convert (ctype, op1));
5958 else if (tcode == MULT_EXPR && code == MULT_EXPR
5959 && (t1 = extract_muldiv (op1, c, code, wide_type,
5960 strict_overflow_p)) != 0)
5961 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5962 fold_convert (ctype, t1));
5963 else if (TREE_CODE (op1) != INTEGER_CST)
5964 return 0;
5965
5966 /* If these are the same operation types, we can associate them
5967 assuming no overflow. */
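   /* For instance (an illustrative sketch): folding `(X * 4) * 2', i.e.
      T = X * 4 with C = 2 and CODE == MULT_EXPR, associates the constants
      and yields `X * 8', provided 4 * 2 does not overflow in CTYPE.  */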
5968 if (tcode == code)
5969 {
5970 bool overflow_p = false;
5971 bool overflow_mul_p;
5972 signop sign = TYPE_SIGN (ctype);
5973 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5974 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5975 if (overflow_mul_p
5976 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5977 overflow_p = true;
5978 if (!overflow_p)
5979 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5980 wide_int_to_tree (ctype, mul));
5981 }
5982
5983 /* If these operations "cancel" each other, we have the main
5984 optimizations of this pass, which occur when either constant is a
5985 multiple of the other, in which case we replace this with either an
5986 operation of CODE or TCODE.
5987
5988 If we have an unsigned type, we cannot do this since it will change
5989 the result if the original computation overflowed. */
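   /* For instance (an illustrative sketch, assuming a signed CTYPE):
      `(X * 8) / 4' cancels to `X * 2' since C divides the inner constant,
      and `(X * 4) / 8' cancels to `X / 2' since the inner constant divides
      C; both depend on signed overflow being undefined, hence the
      *STRICT_OVERFLOW_P updates below.  */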
5990 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5991 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5992 || (tcode == MULT_EXPR
5993 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5994 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5995 && code != MULT_EXPR)))
5996 {
5997 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5998 {
5999 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6000 *strict_overflow_p = true;
6001 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6002 fold_convert (ctype,
6003 const_binop (TRUNC_DIV_EXPR,
6004 op1, c)));
6005 }
6006 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6007 {
6008 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6009 *strict_overflow_p = true;
6010 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6011 fold_convert (ctype,
6012 const_binop (TRUNC_DIV_EXPR,
6013 c, op1)));
6014 }
6015 }
6016 break;
6017
6018 default:
6019 break;
6020 }
6021
6022 return 0;
6023 }
6024 \f
6025 /* Return a node which has the indicated constant VALUE (either 0 or
6026 1 for scalars or {-1,-1,...} or {0,0,...} for vectors),
6027 and is of the indicated TYPE. */
6028
6029 tree
6030 constant_boolean_node (bool value, tree type)
6031 {
6032 if (type == integer_type_node)
6033 return value ? integer_one_node : integer_zero_node;
6034 else if (type == boolean_type_node)
6035 return value ? boolean_true_node : boolean_false_node;
6036 else if (TREE_CODE (type) == VECTOR_TYPE)
6037 return build_vector_from_val (type,
6038 build_int_cst (TREE_TYPE (type),
6039 value ? -1 : 0));
6040 else
6041 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6042 }
6043
6044
6045 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6046 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6047 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6048 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6049 COND is the first argument to CODE; otherwise (as in the example
6050 given here), it is the second argument. TYPE is the type of the
6051 original expression. Return NULL_TREE if no simplification is
6052 possible. */
6053
6054 static tree
6055 fold_binary_op_with_conditional_arg (location_t loc,
6056 enum tree_code code,
6057 tree type, tree op0, tree op1,
6058 tree cond, tree arg, int cond_first_p)
6059 {
6060 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6061 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6062 tree test, true_value, false_value;
6063 tree lhs = NULL_TREE;
6064 tree rhs = NULL_TREE;
6065 enum tree_code cond_code = COND_EXPR;
6066
6067 if (TREE_CODE (cond) == COND_EXPR
6068 || TREE_CODE (cond) == VEC_COND_EXPR)
6069 {
6070 test = TREE_OPERAND (cond, 0);
6071 true_value = TREE_OPERAND (cond, 1);
6072 false_value = TREE_OPERAND (cond, 2);
6073 /* If this arm throws an exception (its type is void), it does not make
6074 sense to try to perform a logical or arithmetic operation
6075 involving it. */
6076 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6077 lhs = true_value;
6078 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6079 rhs = false_value;
6080 }
6081 else
6082 {
6083 tree testtype = TREE_TYPE (cond);
6084 test = cond;
6085 true_value = constant_boolean_node (true, testtype);
6086 false_value = constant_boolean_node (false, testtype);
6087 }
6088
6089 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6090 cond_code = VEC_COND_EXPR;
6091
6092 /* This transformation is only worthwhile if we don't have to wrap ARG
6093 in a SAVE_EXPR and the operation can be simplified without recursing
6094 on at least one of the branches once it is pushed inside the COND_EXPR. */
6095 if (!TREE_CONSTANT (arg)
6096 && (TREE_SIDE_EFFECTS (arg)
6097 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6098 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6099 return NULL_TREE;
6100
6101 arg = fold_convert_loc (loc, arg_type, arg);
6102 if (lhs == 0)
6103 {
6104 true_value = fold_convert_loc (loc, cond_type, true_value);
6105 if (cond_first_p)
6106 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6107 else
6108 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6109 }
6110 if (rhs == 0)
6111 {
6112 false_value = fold_convert_loc (loc, cond_type, false_value);
6113 if (cond_first_p)
6114 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6115 else
6116 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6117 }
6118
6119 /* Check that we have simplified at least one of the branches. */
6120 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6121 return NULL_TREE;
6122
6123 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6124 }
6125
6126 \f
6127 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6128
6129 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6130 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6131 ADDEND is the same as X.
6132
6133 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6134 and finite. The problematic cases are when X is zero, and its mode
6135 has signed zeros. In the case of rounding towards -infinity,
6136 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6137 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6138
6139 bool
6140 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6141 {
6142 if (!real_zerop (addend))
6143 return false;
6144
6145 /* Don't allow the fold with -fsignaling-nans. */
6146 if (HONOR_SNANS (TYPE_MODE (type)))
6147 return false;
6148
6149 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6150 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6151 return true;
6152
6153 /* In a vector or complex, we would need to check the sign of all zeros. */
6154 if (TREE_CODE (addend) != REAL_CST)
6155 return false;
6156
6157 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6158 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6159 negate = !negate;
6160
6161 /* The mode has signed zeros, and we have to honor their sign.
6162 In this situation, there is only one case we can return true for.
6163 X - 0 is the same as X unless rounding towards -infinity is
6164 supported. */
6165 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6166 }
6167
6168 /* Subroutine of fold() that checks comparisons of built-in math
6169 functions against real constants.
6170
6171 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6172 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6173 is the type of the result and ARG0 and ARG1 are the operands of the
6174 comparison. ARG1 must be a TREE_REAL_CST.
6175
6176 The function returns the constant folded tree if a simplification
6177 can be made, and NULL_TREE otherwise. */
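/* For instance (an illustrative sketch): `sqrt(x) > 2.0' folds to
   `x > 4.0'; when NaNs need not be honored, `sqrt(x) < 2.0' likewise
   folds to `x < 4.0'; and `sqrt(x) < -1.0' folds to constant false,
   since sqrt never returns a negative value.  */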
6178
6179 static tree
6180 fold_mathfn_compare (location_t loc,
6181 enum built_in_function fcode, enum tree_code code,
6182 tree type, tree arg0, tree arg1)
6183 {
6184 REAL_VALUE_TYPE c;
6185
6186 if (BUILTIN_SQRT_P (fcode))
6187 {
6188 tree arg = CALL_EXPR_ARG (arg0, 0);
6189 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6190
6191 c = TREE_REAL_CST (arg1);
6192 if (REAL_VALUE_NEGATIVE (c))
6193 {
6194 /* sqrt(x) < y is always false, if y is negative. */
6195 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6196 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6197
6198 /* sqrt(x) > y is always true, if y is negative and we
6199 don't care about NaNs, i.e. negative values of x. */
6200 if (code == NE_EXPR || !HONOR_NANS (mode))
6201 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6202
6203 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6204 return fold_build2_loc (loc, GE_EXPR, type, arg,
6205 build_real (TREE_TYPE (arg), dconst0));
6206 }
6207 else if (code == GT_EXPR || code == GE_EXPR)
6208 {
6209 REAL_VALUE_TYPE c2;
6210
6211 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6212 real_convert (&c2, mode, &c2);
6213
6214 if (REAL_VALUE_ISINF (c2))
6215 {
6216 /* sqrt(x) > y is x == +Inf, when y is very large. */
6217 if (HONOR_INFINITIES (mode))
6218 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6219 build_real (TREE_TYPE (arg), c2));
6220
6221 /* sqrt(x) > y is always false, when y is very large
6222 and we don't care about infinities. */
6223 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6224 }
6225
6226 /* sqrt(x) > c is the same as x > c*c. */
6227 return fold_build2_loc (loc, code, type, arg,
6228 build_real (TREE_TYPE (arg), c2));
6229 }
6230 else if (code == LT_EXPR || code == LE_EXPR)
6231 {
6232 REAL_VALUE_TYPE c2;
6233
6234 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6235 real_convert (&c2, mode, &c2);
6236
6237 if (REAL_VALUE_ISINF (c2))
6238 {
6239 /* sqrt(x) < y is always true, when y is a very large
6240 value and we don't care about NaNs or Infinities. */
6241 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6242 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6243
6244 /* sqrt(x) < y is x != +Inf when y is very large and we
6245 don't care about NaNs. */
6246 if (! HONOR_NANS (mode))
6247 return fold_build2_loc (loc, NE_EXPR, type, arg,
6248 build_real (TREE_TYPE (arg), c2));
6249
6250 /* sqrt(x) < y is x >= 0 when y is very large and we
6251 don't care about Infinities. */
6252 if (! HONOR_INFINITIES (mode))
6253 return fold_build2_loc (loc, GE_EXPR, type, arg,
6254 build_real (TREE_TYPE (arg), dconst0));
6255
6256 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6257 arg = save_expr (arg);
6258 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6259 fold_build2_loc (loc, GE_EXPR, type, arg,
6260 build_real (TREE_TYPE (arg),
6261 dconst0)),
6262 fold_build2_loc (loc, NE_EXPR, type, arg,
6263 build_real (TREE_TYPE (arg),
6264 c2)));
6265 }
6266
6267 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6268 if (! HONOR_NANS (mode))
6269 return fold_build2_loc (loc, code, type, arg,
6270 build_real (TREE_TYPE (arg), c2));
6271
6272 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6273 arg = save_expr (arg);
6274 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6275 fold_build2_loc (loc, GE_EXPR, type, arg,
6276 build_real (TREE_TYPE (arg),
6277 dconst0)),
6278 fold_build2_loc (loc, code, type, arg,
6279 build_real (TREE_TYPE (arg),
6280 c2)));
6281 }
6282 }
6283
6284 return NULL_TREE;
6285 }
6286
6287 /* Subroutine of fold() that optimizes comparisons against Infinities,
6288 either +Inf or -Inf.
6289
6290 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6291 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6292 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6293
6294 The function returns the constant folded tree if a simplification
6295 can be made, and NULL_TREE otherwise. */
6296
6297 static tree
6298 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6299 tree arg0, tree arg1)
6300 {
6301 enum machine_mode mode;
6302 REAL_VALUE_TYPE max;
6303 tree temp;
6304 bool neg;
6305
6306 mode = TYPE_MODE (TREE_TYPE (arg0));
6307
6308 /* For negative infinity swap the sense of the comparison. */
6309 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6310 if (neg)
6311 code = swap_tree_comparison (code);
6312
6313 switch (code)
6314 {
6315 case GT_EXPR:
6316 /* x > +Inf is always false, if we ignore sNaNs. */
6317 if (HONOR_SNANS (mode))
6318 return NULL_TREE;
6319 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6320
6321 case LE_EXPR:
6322 /* x <= +Inf is always true, if we don't care about NaNs. */
6323 if (! HONOR_NANS (mode))
6324 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6325
6326 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6327 arg0 = save_expr (arg0);
6328 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6329
6330 case EQ_EXPR:
6331 case GE_EXPR:
6332 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6333 real_maxval (&max, neg, mode);
6334 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6335 arg0, build_real (TREE_TYPE (arg0), max));
6336
6337 case LT_EXPR:
6338 /* x < +Inf is always equal to x <= DBL_MAX. */
6339 real_maxval (&max, neg, mode);
6340 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6341 arg0, build_real (TREE_TYPE (arg0), max));
6342
6343 case NE_EXPR:
6344 /* x != +Inf is always equal to !(x > DBL_MAX). */
6345 real_maxval (&max, neg, mode);
6346 if (! HONOR_NANS (mode))
6347 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6348 arg0, build_real (TREE_TYPE (arg0), max));
6349
6350 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6351 arg0, build_real (TREE_TYPE (arg0), max));
6352 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6353
6354 default:
6355 break;
6356 }
6357
6358 return NULL_TREE;
6359 }
6360
6361 /* Subroutine of fold() that optimizes comparisons of a division by
6362 a nonzero integer constant against an integer constant, i.e.
6363 X/C1 op C2.
6364
6365 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6366 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6367 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6368
6369 The function returns the constant folded tree if a simplification
6370 can be made, and NULL_TREE otherwise. */
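/* For instance (an illustrative sketch, for signed X): `X / 3 == 2'
   holds exactly for X in [6, 8], so it folds to the range check
   `X >= 6 && X <= 8'; similarly `X / 3 > 2' folds to `X > 8'.  */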
6371
6372 static tree
6373 fold_div_compare (location_t loc,
6374 enum tree_code code, tree type, tree arg0, tree arg1)
6375 {
6376 tree prod, tmp, hi, lo;
6377 tree arg00 = TREE_OPERAND (arg0, 0);
6378 tree arg01 = TREE_OPERAND (arg0, 1);
6379 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6380 bool neg_overflow = false;
6381 bool overflow;
6382
6383 /* We have to do this the hard way to detect unsigned overflow.
6384 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6385 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6386 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6387 neg_overflow = false;
6388
6389 if (sign == UNSIGNED)
6390 {
6391 tmp = int_const_binop (MINUS_EXPR, arg01,
6392 build_int_cst (TREE_TYPE (arg01), 1));
6393 lo = prod;
6394
6395 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6396 val = wi::add (prod, tmp, sign, &overflow);
6397 hi = force_fit_type (TREE_TYPE (arg00), val,
6398 -1, overflow | TREE_OVERFLOW (prod));
6399 }
6400 else if (tree_int_cst_sgn (arg01) >= 0)
6401 {
6402 tmp = int_const_binop (MINUS_EXPR, arg01,
6403 build_int_cst (TREE_TYPE (arg01), 1));
6404 switch (tree_int_cst_sgn (arg1))
6405 {
6406 case -1:
6407 neg_overflow = true;
6408 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6409 hi = prod;
6410 break;
6411
6412 case 0:
6413 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6414 hi = tmp;
6415 break;
6416
6417 case 1:
6418 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6419 lo = prod;
6420 break;
6421
6422 default:
6423 gcc_unreachable ();
6424 }
6425 }
6426 else
6427 {
6428 /* A negative divisor reverses the relational operators. */
6429 code = swap_tree_comparison (code);
6430
6431 tmp = int_const_binop (PLUS_EXPR, arg01,
6432 build_int_cst (TREE_TYPE (arg01), 1));
6433 switch (tree_int_cst_sgn (arg1))
6434 {
6435 case -1:
6436 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6437 lo = prod;
6438 break;
6439
6440 case 0:
6441 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6442 lo = tmp;
6443 break;
6444
6445 case 1:
6446 neg_overflow = true;
6447 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6448 hi = prod;
6449 break;
6450
6451 default:
6452 gcc_unreachable ();
6453 }
6454 }
6455
6456 switch (code)
6457 {
6458 case EQ_EXPR:
6459 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6460 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6461 if (TREE_OVERFLOW (hi))
6462 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6463 if (TREE_OVERFLOW (lo))
6464 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6465 return build_range_check (loc, type, arg00, 1, lo, hi);
6466
6467 case NE_EXPR:
6468 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6469 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6470 if (TREE_OVERFLOW (hi))
6471 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6472 if (TREE_OVERFLOW (lo))
6473 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6474 return build_range_check (loc, type, arg00, 0, lo, hi);
6475
6476 case LT_EXPR:
6477 if (TREE_OVERFLOW (lo))
6478 {
6479 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6480 return omit_one_operand_loc (loc, type, tmp, arg00);
6481 }
6482 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6483
6484 case LE_EXPR:
6485 if (TREE_OVERFLOW (hi))
6486 {
6487 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6488 return omit_one_operand_loc (loc, type, tmp, arg00);
6489 }
6490 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6491
6492 case GT_EXPR:
6493 if (TREE_OVERFLOW (hi))
6494 {
6495 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6496 return omit_one_operand_loc (loc, type, tmp, arg00);
6497 }
6498 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6499
6500 case GE_EXPR:
6501 if (TREE_OVERFLOW (lo))
6502 {
6503 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6504 return omit_one_operand_loc (loc, type, tmp, arg00);
6505 }
6506 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6507
6508 default:
6509 break;
6510 }
6511
6512 return NULL_TREE;
6513 }
6514
6515
6516 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6517 equality/inequality test, then return a simplified form of the test
6518 using a sign test. Otherwise return NULL. TYPE is the desired
6519 result type. */
6520
6521 static tree
6522 fold_single_bit_test_into_sign_test (location_t loc,
6523 enum tree_code code, tree arg0, tree arg1,
6524 tree result_type)
6525 {
6526 /* If this is testing a single bit, we can optimize the test. */
6527 if ((code == NE_EXPR || code == EQ_EXPR)
6528 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6529 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6530 {
6531 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6532 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6533 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6534
6535 if (arg00 != NULL_TREE
6536 /* This is only a win if casting to a signed type is cheap,
6537 i.e. when arg00's type is not a partial mode. */
6538 && TYPE_PRECISION (TREE_TYPE (arg00))
6539 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6540 {
6541 tree stype = signed_type_for (TREE_TYPE (arg00));
6542 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6543 result_type,
6544 fold_convert_loc (loc, stype, arg00),
6545 build_int_cst (stype, 0));
6546 }
6547 }
6548
6549 return NULL_TREE;
6550 }
6551
6552 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6553 equality/inequality test, then return a simplified form of
6554 the test using shifts and logical operations. Otherwise return
6555 NULL. TYPE is the desired result type. */
6556
6557 tree
6558 fold_single_bit_test (location_t loc, enum tree_code code,
6559 tree arg0, tree arg1, tree result_type)
6560 {
6561 /* If this is testing a single bit, we can optimize the test. */
6562 if ((code == NE_EXPR || code == EQ_EXPR)
6563 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6564 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6565 {
6566 tree inner = TREE_OPERAND (arg0, 0);
6567 tree type = TREE_TYPE (arg0);
6568 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6569 enum machine_mode operand_mode = TYPE_MODE (type);
6570 int ops_unsigned;
6571 tree signed_type, unsigned_type, intermediate_type;
6572 tree tem, one;
6573
6574 /* First, see if we can fold the single bit test into a sign-bit
6575 test. */
6576 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6577 result_type);
6578 if (tem)
6579 return tem;
6580
6581 /* Otherwise we have (A & C) != 0 where C is a single bit,
6582 convert that into ((A >> C2) & 1), where C2 = log2(C).
6583 Similarly for (A & C) == 0. */
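   /* For instance (an illustrative sketch): `(A & 8) != 0' becomes
      `(A >> 3) & 1', and `(A & 8) == 0' becomes `((A >> 3) ^ 1) & 1',
      using the BIT_XOR_EXPR applied below for EQ_EXPR.  */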
6584
6585 /* If INNER is a right shift by a constant and that shift count plus
6586 BITNUM stays below the precision of TYPE, adjust BITNUM and INNER. */
6587 if (TREE_CODE (inner) == RSHIFT_EXPR
6588 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6589 && wi::ltu_p (wi::to_widest (TREE_OPERAND (inner, 1)) + bitnum,
6590 TYPE_PRECISION (type)))
6591 {
6592 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6593 inner = TREE_OPERAND (inner, 0);
6594 }
6595
6596 /* If we are going to be able to omit the AND below, we must do our
6597 operations as unsigned. If we must use the AND, we have a choice.
6598 Normally unsigned is faster, but for some machines signed is. */
6599 #ifdef LOAD_EXTEND_OP
6600 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6601 && !flag_syntax_only) ? 0 : 1;
6602 #else
6603 ops_unsigned = 1;
6604 #endif
6605
6606 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6607 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6608 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6609 inner = fold_convert_loc (loc, intermediate_type, inner);
6610
6611 if (bitnum != 0)
6612 inner = build2 (RSHIFT_EXPR, intermediate_type,
6613 inner, size_int (bitnum));
6614
6615 one = build_int_cst (intermediate_type, 1);
6616
6617 if (code == EQ_EXPR)
6618 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6619
6620 /* Put the AND last so it can combine with more things. */
6621 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6622
6623 /* Make sure to return the proper type. */
6624 inner = fold_convert_loc (loc, result_type, inner);
6625
6626 return inner;
6627 }
6628 return NULL_TREE;
6629 }
6630
6631 /* Check whether we are allowed to reorder operands arg0 and arg1,
6632 such that the evaluation of arg1 occurs before arg0. */
6633
6634 static bool
6635 reorder_operands_p (const_tree arg0, const_tree arg1)
6636 {
6637 if (! flag_evaluation_order)
6638 return true;
6639 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6640 return true;
6641 return ! TREE_SIDE_EFFECTS (arg0)
6642 && ! TREE_SIDE_EFFECTS (arg1);
6643 }
6644
6645 /* Test whether it is preferable to swap two operands, ARG0 and
6646 ARG1, for example because ARG0 is an integer constant and ARG1
6647 isn't. If REORDER is true, only recommend swapping if we can
6648 evaluate the operands in reverse order. */
6649
6650 bool
6651 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6652 {
6653 STRIP_SIGN_NOPS (arg0);
6654 STRIP_SIGN_NOPS (arg1);
6655
6656 if (TREE_CODE (arg1) == INTEGER_CST)
6657 return 0;
6658 if (TREE_CODE (arg0) == INTEGER_CST)
6659 return 1;
6660
6661 if (TREE_CODE (arg1) == REAL_CST)
6662 return 0;
6663 if (TREE_CODE (arg0) == REAL_CST)
6664 return 1;
6665
6666 if (TREE_CODE (arg1) == FIXED_CST)
6667 return 0;
6668 if (TREE_CODE (arg0) == FIXED_CST)
6669 return 1;
6670
6671 if (TREE_CODE (arg1) == COMPLEX_CST)
6672 return 0;
6673 if (TREE_CODE (arg0) == COMPLEX_CST)
6674 return 1;
6675
6676 if (TREE_CONSTANT (arg1))
6677 return 0;
6678 if (TREE_CONSTANT (arg0))
6679 return 1;
6680
6681 if (optimize_function_for_size_p (cfun))
6682 return 0;
6683
6684 if (reorder && flag_evaluation_order
6685 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6686 return 0;
6687
6688 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6689 for commutative and comparison operators. Ensuring a canonical
6690 form allows the optimizers to find additional redundancies without
6691 having to explicitly check for both orderings. */
6692 if (TREE_CODE (arg0) == SSA_NAME
6693 && TREE_CODE (arg1) == SSA_NAME
6694 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6695 return 1;
6696
6697 /* Put SSA_NAMEs last. */
6698 if (TREE_CODE (arg1) == SSA_NAME)
6699 return 0;
6700 if (TREE_CODE (arg0) == SSA_NAME)
6701 return 1;
6702
6703 /* Put variables last. */
6704 if (DECL_P (arg1))
6705 return 0;
6706 if (DECL_P (arg0))
6707 return 1;
6708
6709 return 0;
6710 }
6711
6712 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6713 ARG0 is extended to a wider type. */
6714
6715 static tree
6716 fold_widened_comparison (location_t loc, enum tree_code code,
6717 tree type, tree arg0, tree arg1)
6718 {
6719 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6720 tree arg1_unw;
6721 tree shorter_type, outer_type;
6722 tree min, max;
6723 bool above, below;
6724
6725 if (arg0_unw == arg0)
6726 return NULL_TREE;
6727 shorter_type = TREE_TYPE (arg0_unw);
6728
6729 #ifdef HAVE_canonicalize_funcptr_for_compare
6730 /* Disable this optimization if we're casting a function pointer
6731 type on targets that require function pointer canonicalization. */
6732 if (HAVE_canonicalize_funcptr_for_compare
6733 && TREE_CODE (shorter_type) == POINTER_TYPE
6734 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6735 return NULL_TREE;
6736 #endif
6737
6738 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6739 return NULL_TREE;
6740
6741 arg1_unw = get_unwidened (arg1, NULL_TREE);
6742
6743 /* If possible, express the comparison in the shorter mode. */
6744 if ((code == EQ_EXPR || code == NE_EXPR
6745 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6746 && (TREE_TYPE (arg1_unw) == shorter_type
6747 || ((TYPE_PRECISION (shorter_type)
6748 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6749 && (TYPE_UNSIGNED (shorter_type)
6750 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6751 || (TREE_CODE (arg1_unw) == INTEGER_CST
6752 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6753 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6754 && int_fits_type_p (arg1_unw, shorter_type))))
6755 return fold_build2_loc (loc, code, type, arg0_unw,
6756 fold_convert_loc (loc, shorter_type, arg1_unw));
6757
6758 if (TREE_CODE (arg1_unw) != INTEGER_CST
6759 || TREE_CODE (shorter_type) != INTEGER_TYPE
6760 || !int_fits_type_p (arg1_unw, shorter_type))
6761 return NULL_TREE;
6762
6763 /* If we are comparing with an integer that does not fit into the range
6764 of the shorter type, the result is known. */
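   /* For instance (an illustrative sketch): if ARG0 widens a signed char,
      the shorter type only holds [-128, 127], so `(int) c == 300' is
      known to be false and `(int) c < 300' known to be true.  */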
6765 outer_type = TREE_TYPE (arg1_unw);
6766 min = lower_bound_in_type (outer_type, shorter_type);
6767 max = upper_bound_in_type (outer_type, shorter_type);
6768
6769 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6770 max, arg1_unw));
6771 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6772 arg1_unw, min));
6773
6774 switch (code)
6775 {
6776 case EQ_EXPR:
6777 if (above || below)
6778 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6779 break;
6780
6781 case NE_EXPR:
6782 if (above || below)
6783 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6784 break;
6785
6786 case LT_EXPR:
6787 case LE_EXPR:
6788 if (above)
6789 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6790 else if (below)
6791 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6792
6793 case GT_EXPR:
6794 case GE_EXPR:
6795 if (above)
6796 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6797 else if (below)
6798 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6799
6800 default:
6801 break;
6802 }
6803
6804 return NULL_TREE;
6805 }
6806
6807 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6808 ARG0 just the signedness is changed. */
6809
6810 static tree
6811 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6812 tree arg0, tree arg1)
6813 {
6814 tree arg0_inner;
6815 tree inner_type, outer_type;
6816
6817 if (!CONVERT_EXPR_P (arg0))
6818 return NULL_TREE;
6819
6820 outer_type = TREE_TYPE (arg0);
6821 arg0_inner = TREE_OPERAND (arg0, 0);
6822 inner_type = TREE_TYPE (arg0_inner);
6823
6824 #ifdef HAVE_canonicalize_funcptr_for_compare
6825 /* Disable this optimization if we're casting a function pointer
6826 type on targets that require function pointer canonicalization. */
6827 if (HAVE_canonicalize_funcptr_for_compare
6828 && TREE_CODE (inner_type) == POINTER_TYPE
6829 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6830 return NULL_TREE;
6831 #endif
6832
6833 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6834 return NULL_TREE;
6835
6836 if (TREE_CODE (arg1) != INTEGER_CST
6837 && !(CONVERT_EXPR_P (arg1)
6838 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6839 return NULL_TREE;
6840
6841 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6842 && code != NE_EXPR
6843 && code != EQ_EXPR)
6844 return NULL_TREE;
6845
6846 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6847 return NULL_TREE;
6848
6849 if (TREE_CODE (arg1) == INTEGER_CST)
6850 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6851 TREE_OVERFLOW (arg1));
6852 else
6853 arg1 = fold_convert_loc (loc, inner_type, arg1);
6854
6855 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6856 }
6857
6858 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6859 the step of the array. Reconstructs s and delta in the case of s *
6860 delta being an integer constant (and thus already folded). ADDR is
6861 the address. OP1 is the multiplicative expression. If the
6862 function succeeds, the new address expression is returned.
6863 Otherwise NULL_TREE is returned. LOC is the location of the
6864 resulting expression. */
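/* For instance (an illustrative sketch, assuming 4-byte int): for
   `int a[N];', the address `&a[i] p+ d * 4' steps by the element size,
   so it can be rewritten as `&a[i + d]'.  */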
6865
6866 static tree
6867 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6868 {
6869 tree s, delta, step;
6870 tree ref = TREE_OPERAND (addr, 0), pref;
6871 tree ret, pos;
6872 tree itype;
6873 bool mdim = false;
6874
6875 /* Strip the nops that might be added when converting op1 to sizetype. */
6876 STRIP_NOPS (op1);
6877
6878 /* Canonicalize op1 into a possibly non-constant delta
6879 and an INTEGER_CST s. */
6880 if (TREE_CODE (op1) == MULT_EXPR)
6881 {
6882 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6883
6884 STRIP_NOPS (arg0);
6885 STRIP_NOPS (arg1);
6886
6887 if (TREE_CODE (arg0) == INTEGER_CST)
6888 {
6889 s = arg0;
6890 delta = arg1;
6891 }
6892 else if (TREE_CODE (arg1) == INTEGER_CST)
6893 {
6894 s = arg1;
6895 delta = arg0;
6896 }
6897 else
6898 return NULL_TREE;
6899 }
6900 else if (TREE_CODE (op1) == INTEGER_CST)
6901 {
6902 delta = op1;
6903 s = NULL_TREE;
6904 }
6905 else
6906 {
6907 /* Treat OP1 as delta * 1. */
6908 delta = op1;
6909 s = integer_one_node;
6910 }
6911
6912 /* Handle &x.array the same as we would handle &x.array[0]. */
6913 if (TREE_CODE (ref) == COMPONENT_REF
6914 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6915 {
6916 tree domain;
6917
6918 /* Remember if this was a multi-dimensional array. */
6919 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6920 mdim = true;
6921
6922 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6923 if (! domain)
6924 goto cont;
6925 itype = TREE_TYPE (domain);
6926
6927 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6928 if (TREE_CODE (step) != INTEGER_CST)
6929 goto cont;
6930
6931 if (s)
6932 {
6933 if (! tree_int_cst_equal (step, s))
6934 goto cont;
6935 }
6936 else
6937 {
6938 /* Check whether delta is a multiple of the step. */
6939 tree tmp = div_if_zero_remainder (op1, step);
6940 if (! tmp)
6941 goto cont;
6942 delta = tmp;
6943 }
6944
6945 /* Only fold here if we can verify we do not overflow one
6946 dimension of a multi-dimensional array. */
6947 if (mdim)
6948 {
6949 tree tmp;
6950
6951 if (!TYPE_MIN_VALUE (domain)
6952 || !TYPE_MAX_VALUE (domain)
6953 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6954 goto cont;
6955
6956 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6957 fold_convert_loc (loc, itype,
6958 TYPE_MIN_VALUE (domain)),
6959 fold_convert_loc (loc, itype, delta));
6960 if (TREE_CODE (tmp) != INTEGER_CST
6961 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6962 goto cont;
6963 }
6964
6965 /* We found a suitable component reference. */
6966
6967 pref = TREE_OPERAND (addr, 0);
6968 ret = copy_node (pref);
6969 SET_EXPR_LOCATION (ret, loc);
6970
6971 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6972 fold_build2_loc
6973 (loc, PLUS_EXPR, itype,
6974 fold_convert_loc (loc, itype,
6975 TYPE_MIN_VALUE
6976 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6977 fold_convert_loc (loc, itype, delta)),
6978 NULL_TREE, NULL_TREE);
6979 return build_fold_addr_expr_loc (loc, ret);
6980 }
6981
6982 cont:
6983
6984 for (;; ref = TREE_OPERAND (ref, 0))
6985 {
6986 if (TREE_CODE (ref) == ARRAY_REF)
6987 {
6988 tree domain;
6989
6990 /* Remember if this was a multi-dimensional array. */
6991 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6992 mdim = true;
6993
6994 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6995 if (! domain)
6996 continue;
6997 itype = TREE_TYPE (domain);
6998
6999 step = array_ref_element_size (ref);
7000 if (TREE_CODE (step) != INTEGER_CST)
7001 continue;
7002
7003 if (s)
7004 {
7005 if (! tree_int_cst_equal (step, s))
7006 continue;
7007 }
7008 else
7009 {
7010 /* Check whether delta is a multiple of the step. */
7011 tree tmp = div_if_zero_remainder (op1, step);
7012 if (! tmp)
7013 continue;
7014 delta = tmp;
7015 }
7016
7017 /* Only fold here if we can verify we do not overflow one
7018 dimension of a multi-dimensional array. */
7019 if (mdim)
7020 {
7021 tree tmp;
7022
7023 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7024 || !TYPE_MAX_VALUE (domain)
7025 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7026 continue;
7027
7028 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7029 fold_convert_loc (loc, itype,
7030 TREE_OPERAND (ref, 1)),
7031 fold_convert_loc (loc, itype, delta));
7032 if (!tmp
7033 || TREE_CODE (tmp) != INTEGER_CST
7034 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7035 continue;
7036 }
7037
7038 break;
7039 }
7040 else
7041 mdim = false;
7042
7043 if (!handled_component_p (ref))
7044 return NULL_TREE;
7045 }
7046
7047 /* We found a suitable array reference. So copy everything up to it,
7048 and replace the index. */
7049
7050 pref = TREE_OPERAND (addr, 0);
7051 ret = copy_node (pref);
7052 SET_EXPR_LOCATION (ret, loc);
7053 pos = ret;
7054
7055 while (pref != ref)
7056 {
7057 pref = TREE_OPERAND (pref, 0);
7058 TREE_OPERAND (pos, 0) = copy_node (pref);
7059 pos = TREE_OPERAND (pos, 0);
7060 }
7061
7062 TREE_OPERAND (pos, 1)
7063 = fold_build2_loc (loc, PLUS_EXPR, itype,
7064 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7065 fold_convert_loc (loc, itype, delta));
7066 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7067 }
7068
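/* An illustrative sketch (not part of GCC itself), assuming a 4-byte
   int: given

     extern int a[10];
     int *f (int d) { return &a[1] + d; }

   the address is &a[1] p+ d * 4 internally; since the scale 4 equals
   the step of the array, the routine above rewrites it as &a[1 + d].  */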
7069
7070 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7071 means A >= Y && A != MAX, but in this case we know that
7072 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7073
7074 static tree
7075 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7076 {
7077 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7078
7079 if (TREE_CODE (bound) == LT_EXPR)
7080 a = TREE_OPERAND (bound, 0);
7081 else if (TREE_CODE (bound) == GT_EXPR)
7082 a = TREE_OPERAND (bound, 1);
7083 else
7084 return NULL_TREE;
7085
7086 typea = TREE_TYPE (a);
7087 if (!INTEGRAL_TYPE_P (typea)
7088 && !POINTER_TYPE_P (typea))
7089 return NULL_TREE;
7090
7091 if (TREE_CODE (ineq) == LT_EXPR)
7092 {
7093 a1 = TREE_OPERAND (ineq, 1);
7094 y = TREE_OPERAND (ineq, 0);
7095 }
7096 else if (TREE_CODE (ineq) == GT_EXPR)
7097 {
7098 a1 = TREE_OPERAND (ineq, 0);
7099 y = TREE_OPERAND (ineq, 1);
7100 }
7101 else
7102 return NULL_TREE;
7103
7104 if (TREE_TYPE (a1) != typea)
7105 return NULL_TREE;
7106
7107 if (POINTER_TYPE_P (typea))
7108 {
7109 /* Convert the pointers to signed integers before taking the difference. */
7110 tree ta = fold_convert_loc (loc, ssizetype, a);
7111 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7112 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7113 }
7114 else
7115 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7116
7117 if (!diff || !integer_onep (diff))
7118 return NULL_TREE;
7119
7120 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7121 }
7122
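/* An illustrative sketch (not part of GCC itself): in

     int f (int a, int x, int y) { return a < x && a + 1 > y; }

   the second conjunct is rewritten to a >= y.  In general
   A + 1 > Y means A >= Y && A != MAX, but the first conjunct
   already guarantees A < X <= MAX, so the sharper form is safe.  */
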
7123 /* Fold a sum or difference of at least one multiplication.
7124 Returns the folded tree or NULL if no simplification could be made. */
7125
7126 static tree
7127 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7128 tree arg0, tree arg1)
7129 {
7130 tree arg00, arg01, arg10, arg11;
7131 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7132
7133 /* (A * C) +- (B * C) -> (A+-B) * C.
7134 (A * C) +- A -> A * (C+-1).
7135 We are most concerned about the case where C is a constant,
7136 but other combinations show up during loop reduction. Since
7137 it is not difficult, try all four possibilities. */
7138
7139 if (TREE_CODE (arg0) == MULT_EXPR)
7140 {
7141 arg00 = TREE_OPERAND (arg0, 0);
7142 arg01 = TREE_OPERAND (arg0, 1);
7143 }
7144 else if (TREE_CODE (arg0) == INTEGER_CST)
7145 {
7146 arg00 = build_one_cst (type);
7147 arg01 = arg0;
7148 }
7149 else
7150 {
7151 /* We cannot generate constant 1 for fract. */
7152 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7153 return NULL_TREE;
7154 arg00 = arg0;
7155 arg01 = build_one_cst (type);
7156 }
7157 if (TREE_CODE (arg1) == MULT_EXPR)
7158 {
7159 arg10 = TREE_OPERAND (arg1, 0);
7160 arg11 = TREE_OPERAND (arg1, 1);
7161 }
7162 else if (TREE_CODE (arg1) == INTEGER_CST)
7163 {
7164 arg10 = build_one_cst (type);
7165 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7166 the purpose of this canonicalization. */
7167 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7168 && negate_expr_p (arg1)
7169 && code == PLUS_EXPR)
7170 {
7171 arg11 = negate_expr (arg1);
7172 code = MINUS_EXPR;
7173 }
7174 else
7175 arg11 = arg1;
7176 }
7177 else
7178 {
7179 /* We cannot generate constant 1 for fract. */
7180 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7181 return NULL_TREE;
7182 arg10 = arg1;
7183 arg11 = build_one_cst (type);
7184 }
7185 same = NULL_TREE;
7186
7187 if (operand_equal_p (arg01, arg11, 0))
7188 same = arg01, alt0 = arg00, alt1 = arg10;
7189 else if (operand_equal_p (arg00, arg10, 0))
7190 same = arg00, alt0 = arg01, alt1 = arg11;
7191 else if (operand_equal_p (arg00, arg11, 0))
7192 same = arg00, alt0 = arg01, alt1 = arg10;
7193 else if (operand_equal_p (arg01, arg10, 0))
7194 same = arg01, alt0 = arg00, alt1 = arg11;
7195
7196 /* No identical multiplicands; see if we can find a common
7197 power-of-two factor in non-power-of-two multiplies. This
7198 can help in multi-dimensional array access. */
7199 else if (tree_fits_shwi_p (arg01)
7200 && tree_fits_shwi_p (arg11))
7201 {
7202 HOST_WIDE_INT int01, int11, tmp;
7203 bool swap = false;
7204 tree maybe_same;
7205 int01 = tree_to_shwi (arg01);
7206 int11 = tree_to_shwi (arg11);
7207
7208 /* Move min of absolute values to int11. */
7209 if (absu_hwi (int01) < absu_hwi (int11))
7210 {
7211 tmp = int01, int01 = int11, int11 = tmp;
7212 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7213 maybe_same = arg01;
7214 swap = true;
7215 }
7216 else
7217 maybe_same = arg11;
7218
7219 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7220 /* The remainder should not be a constant, otherwise we
7221 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which would
7222 increase the number of multiplications necessary. */
7223 && TREE_CODE (arg10) != INTEGER_CST)
7224 {
7225 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7226 build_int_cst (TREE_TYPE (arg00),
7227 int01 / int11));
7228 alt1 = arg10;
7229 same = maybe_same;
7230 if (swap)
7231 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7232 }
7233 }
7234
7235 if (same)
7236 return fold_build2_loc (loc, MULT_EXPR, type,
7237 fold_build2_loc (loc, code, type,
7238 fold_convert_loc (loc, type, alt0),
7239 fold_convert_loc (loc, type, alt1)),
7240 fold_convert_loc (loc, type, same));
7241
7242 return NULL_TREE;
7243 }
7244
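/* An illustrative sketch (not part of GCC itself) of the folding
   above:

     int f (int a, int b, int c) { return a * c + b * c; }

   becomes "(a + b) * c", and the power-of-two path turns

     int g (int x, int y) { return x * 8 + y * 4; }

   into "(x * 2 + y) * 4", so both terms share the common factor 4.  */
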
7245 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7246 specified by EXPR into the buffer PTR of length LEN bytes.
7247 Return the number of bytes placed in the buffer, or zero
7248 upon failure. */
7249
7250 static int
7251 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7252 {
7253 tree type = TREE_TYPE (expr);
7254 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7255 int byte, offset, word, words;
7256 unsigned char value;
7257
7258 if (total_bytes > len)
7259 return 0;
7260 words = total_bytes / UNITS_PER_WORD;
7261
7262 for (byte = 0; byte < total_bytes; byte++)
7263 {
7264 int bitpos = byte * BITS_PER_UNIT;
7265 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7266 number of bytes. */
7267 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7268
7269 if (total_bytes > UNITS_PER_WORD)
7270 {
7271 word = byte / UNITS_PER_WORD;
7272 if (WORDS_BIG_ENDIAN)
7273 word = (words - 1) - word;
7274 offset = word * UNITS_PER_WORD;
7275 if (BYTES_BIG_ENDIAN)
7276 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7277 else
7278 offset += byte % UNITS_PER_WORD;
7279 }
7280 else
7281 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7282 ptr[offset] = value;
7283 }
7284 return total_bytes;
7285 }
7286
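/* A minimal host-side sketch (not GCC code) of the byte placement
   performed above, for a little-endian target whose words are at
   least as large as the value:

     static void
     encode_le (unsigned long long v, unsigned char *ptr, int total_bytes)
     {
       for (int byte = 0; byte < total_bytes; byte++)
         ptr[byte] = (unsigned char) (v >> (byte * 8));
     }

   The real routine reads each byte from the arbitrary-precision value
   via wi::extract_uhwi and additionally permutes the bytes for
   BYTES_BIG_ENDIAN / WORDS_BIG_ENDIAN targets.  */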
7287
7288 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7289 specified by EXPR into the buffer PTR of length LEN bytes.
7290 Return the number of bytes placed in the buffer, or zero
7291 upon failure. */
7292
7293 static int
7294 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7295 {
7296 tree type = TREE_TYPE (expr);
7297 enum machine_mode mode = TYPE_MODE (type);
7298 int total_bytes = GET_MODE_SIZE (mode);
7299 FIXED_VALUE_TYPE value;
7300 tree i_value, i_type;
7301
7302 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7303 return 0;
7304
7305 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7306
7307 if (NULL_TREE == i_type
7308 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7309 return 0;
7310
7311 value = TREE_FIXED_CST (expr);
7312 i_value = double_int_to_tree (i_type, value.data);
7313
7314 return native_encode_int (i_value, ptr, len);
7315 }
7316
7317
7318 /* Subroutine of native_encode_expr. Encode the REAL_CST
7319 specified by EXPR into the buffer PTR of length LEN bytes.
7320 Return the number of bytes placed in the buffer, or zero
7321 upon failure. */
7322
7323 static int
7324 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7325 {
7326 tree type = TREE_TYPE (expr);
7327 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7328 int byte, offset, word, words, bitpos;
7329 unsigned char value;
7330
7331 /* There are always 32 bits in each long, no matter the size of
7332 the host's long. We handle floating point representations with
7333 up to 192 bits. */
7334 long tmp[6];
7335
7336 if (total_bytes > len)
7337 return 0;
7338 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7339
7340 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7341
7342 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7343 bitpos += BITS_PER_UNIT)
7344 {
7345 byte = (bitpos / BITS_PER_UNIT) & 3;
7346 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7347
7348 if (UNITS_PER_WORD < 4)
7349 {
7350 word = byte / UNITS_PER_WORD;
7351 if (WORDS_BIG_ENDIAN)
7352 word = (words - 1) - word;
7353 offset = word * UNITS_PER_WORD;
7354 if (BYTES_BIG_ENDIAN)
7355 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7356 else
7357 offset += byte % UNITS_PER_WORD;
7358 }
7359 else
7360 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7361 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7362 }
7363 return total_bytes;
7364 }
7365
7366 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7367 specified by EXPR into the buffer PTR of length LEN bytes.
7368 Return the number of bytes placed in the buffer, or zero
7369 upon failure. */
7370
7371 static int
7372 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7373 {
7374 int rsize, isize;
7375 tree part;
7376
7377 part = TREE_REALPART (expr);
7378 rsize = native_encode_expr (part, ptr, len);
7379 if (rsize == 0)
7380 return 0;
7381 part = TREE_IMAGPART (expr);
7382 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7383 if (isize != rsize)
7384 return 0;
7385 return rsize + isize;
7386 }
7387
7388
7389 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7390 specified by EXPR into the buffer PTR of length LEN bytes.
7391 Return the number of bytes placed in the buffer, or zero
7392 upon failure. */
7393
7394 static int
7395 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7396 {
7397 unsigned i, count;
7398 int size, offset;
7399 tree itype, elem;
7400
7401 offset = 0;
7402 count = VECTOR_CST_NELTS (expr);
7403 itype = TREE_TYPE (TREE_TYPE (expr));
7404 size = GET_MODE_SIZE (TYPE_MODE (itype));
7405 for (i = 0; i < count; i++)
7406 {
7407 elem = VECTOR_CST_ELT (expr, i);
7408 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7409 return 0;
7410 offset += size;
7411 }
7412 return offset;
7413 }
7414
7415
7416 /* Subroutine of native_encode_expr. Encode the STRING_CST
7417 specified by EXPR into the buffer PTR of length LEN bytes.
7418 Return the number of bytes placed in the buffer, or zero
7419 upon failure. */
7420
7421 static int
7422 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7423 {
7424 tree type = TREE_TYPE (expr);
7425 HOST_WIDE_INT total_bytes;
7426
7427 if (TREE_CODE (type) != ARRAY_TYPE
7428 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7429 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7430 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7431 return 0;
7432 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7433 if (total_bytes > len)
7434 return 0;
7435 if (TREE_STRING_LENGTH (expr) < total_bytes)
7436 {
7437 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7438 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7439 total_bytes - TREE_STRING_LENGTH (expr));
7440 }
7441 else
7442 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7443 return total_bytes;
7444 }
7445
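/* For example (not GCC code, just the observable layout): encoding a
   STRING_CST "ab" whose type is char[5] produces the five bytes
   { 'a', 'b', 0, 0, 0 }; the bytes beyond TREE_STRING_LENGTH are
   zero-filled by the memset above.  */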
7446
7447 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7448 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7449 buffer PTR of length LEN bytes. Return the number of bytes
7450 placed in the buffer, or zero upon failure. */
7451
7452 int
7453 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7454 {
7455 switch (TREE_CODE (expr))
7456 {
7457 case INTEGER_CST:
7458 return native_encode_int (expr, ptr, len);
7459
7460 case REAL_CST:
7461 return native_encode_real (expr, ptr, len);
7462
7463 case FIXED_CST:
7464 return native_encode_fixed (expr, ptr, len);
7465
7466 case COMPLEX_CST:
7467 return native_encode_complex (expr, ptr, len);
7468
7469 case VECTOR_CST:
7470 return native_encode_vector (expr, ptr, len);
7471
7472 case STRING_CST:
7473 return native_encode_string (expr, ptr, len);
7474
7475 default:
7476 return 0;
7477 }
7478 }
7479
7480
7481 /* Subroutine of native_interpret_expr. Interpret the contents of
7482 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7483 If the buffer cannot be interpreted, return NULL_TREE. */
7484
7485 static tree
7486 native_interpret_int (tree type, const unsigned char *ptr, int len)
7487 {
7488 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7489
7490 if (total_bytes > len
7491 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7492 return NULL_TREE;
7493
7494 wide_int result = wi::from_buffer (ptr, total_bytes);
7495
7496 return wide_int_to_tree (type, result);
7497 }
7498
7499
7500 /* Subroutine of native_interpret_expr. Interpret the contents of
7501 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7502 If the buffer cannot be interpreted, return NULL_TREE. */
7503
7504 static tree
7505 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7506 {
7507 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7508 double_int result;
7509 FIXED_VALUE_TYPE fixed_value;
7510
7511 if (total_bytes > len
7512 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7513 return NULL_TREE;
7514
7515 result = double_int::from_buffer (ptr, total_bytes);
7516 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7517
7518 return build_fixed (type, fixed_value);
7519 }
7520
7521
7522 /* Subroutine of native_interpret_expr. Interpret the contents of
7523 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7524 If the buffer cannot be interpreted, return NULL_TREE. */
7525
7526 static tree
7527 native_interpret_real (tree type, const unsigned char *ptr, int len)
7528 {
7529 enum machine_mode mode = TYPE_MODE (type);
7530 int total_bytes = GET_MODE_SIZE (mode);
7531 int byte, offset, word, words, bitpos;
7532 unsigned char value;
7533 /* There are always 32 bits in each long, no matter the size of
7534 the host's long. We handle floating point representations with
7535 up to 192 bits. */
7536 REAL_VALUE_TYPE r;
7537 long tmp[6];
7538
7540 if (total_bytes > len || total_bytes > 24)
7541 return NULL_TREE;
7542 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7543
7544 memset (tmp, 0, sizeof (tmp));
7545 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7546 bitpos += BITS_PER_UNIT)
7547 {
7548 byte = (bitpos / BITS_PER_UNIT) & 3;
7549 if (UNITS_PER_WORD < 4)
7550 {
7551 word = byte / UNITS_PER_WORD;
7552 if (WORDS_BIG_ENDIAN)
7553 word = (words - 1) - word;
7554 offset = word * UNITS_PER_WORD;
7555 if (BYTES_BIG_ENDIAN)
7556 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7557 else
7558 offset += byte % UNITS_PER_WORD;
7559 }
7560 else
7561 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7562 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7563
7564 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7565 }
7566
7567 real_from_target (&r, tmp, mode);
7568 return build_real (type, r);
7569 }
7570
7571
7572 /* Subroutine of native_interpret_expr. Interpret the contents of
7573 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7574 If the buffer cannot be interpreted, return NULL_TREE. */
7575
7576 static tree
7577 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7578 {
7579 tree etype, rpart, ipart;
7580 int size;
7581
7582 etype = TREE_TYPE (type);
7583 size = GET_MODE_SIZE (TYPE_MODE (etype));
7584 if (size * 2 > len)
7585 return NULL_TREE;
7586 rpart = native_interpret_expr (etype, ptr, size);
7587 if (!rpart)
7588 return NULL_TREE;
7589 ipart = native_interpret_expr (etype, ptr+size, size);
7590 if (!ipart)
7591 return NULL_TREE;
7592 return build_complex (type, rpart, ipart);
7593 }
7594
7595
7596 /* Subroutine of native_interpret_expr. Interpret the contents of
7597 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7598 If the buffer cannot be interpreted, return NULL_TREE. */
7599
7600 static tree
7601 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7602 {
7603 tree etype, elem;
7604 int i, size, count;
7605 tree *elements;
7606
7607 etype = TREE_TYPE (type);
7608 size = GET_MODE_SIZE (TYPE_MODE (etype));
7609 count = TYPE_VECTOR_SUBPARTS (type);
7610 if (size * count > len)
7611 return NULL_TREE;
7612
7613 elements = XALLOCAVEC (tree, count);
7614 for (i = count - 1; i >= 0; i--)
7615 {
7616 elem = native_interpret_expr (etype, ptr+(i*size), size);
7617 if (!elem)
7618 return NULL_TREE;
7619 elements[i] = elem;
7620 }
7621 return build_vector (type, elements);
7622 }
7623
7624
7625 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7626 the buffer PTR of length LEN as a constant of type TYPE. For
7627 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7628 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7629 return NULL_TREE. */
7630
7631 tree
7632 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7633 {
7634 switch (TREE_CODE (type))
7635 {
7636 case INTEGER_TYPE:
7637 case ENUMERAL_TYPE:
7638 case BOOLEAN_TYPE:
7639 case POINTER_TYPE:
7640 case REFERENCE_TYPE:
7641 return native_interpret_int (type, ptr, len);
7642
7643 case REAL_TYPE:
7644 return native_interpret_real (type, ptr, len);
7645
7646 case FIXED_POINT_TYPE:
7647 return native_interpret_fixed (type, ptr, len);
7648
7649 case COMPLEX_TYPE:
7650 return native_interpret_complex (type, ptr, len);
7651
7652 case VECTOR_TYPE:
7653 return native_interpret_vector (type, ptr, len);
7654
7655 default:
7656 return NULL_TREE;
7657 }
7658 }
7659
7660 /* Returns true if we can interpret the contents of a native encoding
7661 as TYPE. */
7662
7663 static bool
7664 can_native_interpret_type_p (tree type)
7665 {
7666 switch (TREE_CODE (type))
7667 {
7668 case INTEGER_TYPE:
7669 case ENUMERAL_TYPE:
7670 case BOOLEAN_TYPE:
7671 case POINTER_TYPE:
7672 case REFERENCE_TYPE:
7673 case FIXED_POINT_TYPE:
7674 case REAL_TYPE:
7675 case COMPLEX_TYPE:
7676 case VECTOR_TYPE:
7677 return true;
7678 default:
7679 return false;
7680 }
7681 }
7682
7683 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7684 TYPE at compile-time. If we're unable to perform the conversion
7685 return NULL_TREE. */
7686
7687 static tree
7688 fold_view_convert_expr (tree type, tree expr)
7689 {
7690 /* We support up to 512-bit values (for V8DFmode). */
7691 unsigned char buffer[64];
7692 int len;
7693
7694 /* Check that the host and target are sane. */
7695 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7696 return NULL_TREE;
7697
7698 len = native_encode_expr (expr, buffer, sizeof (buffer));
7699 if (len == 0)
7700 return NULL_TREE;
7701
7702 return native_interpret_expr (type, buffer, len);
7703 }
7704
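/* An illustrative sketch (not part of GCC itself): this is the
   compile-time analogue of type punning.  With 32-bit float and int
   on a host and target satisfying the checks above,

     VIEW_CONVERT_EXPR <int> (1.0f)

   folds by encoding 1.0f into its target byte image and interpreting
   those four bytes as an int, giving the constant 0x3f800000.  */
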
7705 /* Build an expression for the address of T. Folds away INDIRECT_REF
7706 to avoid confusing the gimplify process. */
7707
7708 tree
7709 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7710 {
7711 /* The size of the object is not relevant when talking about its address. */
7712 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7713 t = TREE_OPERAND (t, 0);
7714
7715 if (TREE_CODE (t) == INDIRECT_REF)
7716 {
7717 t = TREE_OPERAND (t, 0);
7718
7719 if (TREE_TYPE (t) != ptrtype)
7720 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7721 }
7722 else if (TREE_CODE (t) == MEM_REF
7723 && integer_zerop (TREE_OPERAND (t, 1)))
7724 return TREE_OPERAND (t, 0);
7725 else if (TREE_CODE (t) == MEM_REF
7726 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7727 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7728 TREE_OPERAND (t, 0),
7729 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7730 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7731 {
7732 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7733
7734 if (TREE_TYPE (t) != ptrtype)
7735 t = fold_convert_loc (loc, ptrtype, t);
7736 }
7737 else
7738 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7739
7740 return t;
7741 }
7742
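/* An illustrative sketch (not part of GCC itself): taking the address
   of a dereference cancels, so for

     int *f (int *p) { return &*p; }

   the routine above hands back p itself (wrapped in a NOP_EXPR when
   the pointer types differ) instead of building ADDR_EXPR of
   INDIRECT_REF.  */
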
7743 /* Build an expression for the address of T. */
7744
7745 tree
7746 build_fold_addr_expr_loc (location_t loc, tree t)
7747 {
7748 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7749
7750 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7751 }
7752
7753 static bool vec_cst_ctor_to_array (tree, tree *);
7754
7755 /* Fold a unary expression of code CODE and type TYPE with operand
7756 OP0. Return the folded expression if folding is successful.
7757 Otherwise, return NULL_TREE. */
7758
7759 tree
7760 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7761 {
7762 tree tem;
7763 tree arg0;
7764 enum tree_code_class kind = TREE_CODE_CLASS (code);
7765
7766 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7767 && TREE_CODE_LENGTH (code) == 1);
7768
7769 arg0 = op0;
7770 if (arg0)
7771 {
7772 if (CONVERT_EXPR_CODE_P (code)
7773 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7774 {
7775 /* Don't use STRIP_NOPS, because signedness of argument type
7776 matters. */
7777 STRIP_SIGN_NOPS (arg0);
7778 }
7779 else
7780 {
7781 /* Strip any conversions that don't change the mode. This
7782 is safe for every expression, except for a comparison
7783 expression because its signedness is derived from its
7784 operands.
7785
7786 Note that this is done as an internal manipulation within
7787 the constant folder, in order to find the simplest
7788 representation of the arguments so that their form can be
7789 studied. In any case, the appropriate type conversions
7790 should be put back in the tree that will get out of the
7791 constant folder. */
7792 STRIP_NOPS (arg0);
7793 }
7794 }
7795
7796 if (TREE_CODE_CLASS (code) == tcc_unary)
7797 {
7798 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7799 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7800 fold_build1_loc (loc, code, type,
7801 fold_convert_loc (loc, TREE_TYPE (op0),
7802 TREE_OPERAND (arg0, 1))));
7803 else if (TREE_CODE (arg0) == COND_EXPR)
7804 {
7805 tree arg01 = TREE_OPERAND (arg0, 1);
7806 tree arg02 = TREE_OPERAND (arg0, 2);
7807 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7808 arg01 = fold_build1_loc (loc, code, type,
7809 fold_convert_loc (loc,
7810 TREE_TYPE (op0), arg01));
7811 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7812 arg02 = fold_build1_loc (loc, code, type,
7813 fold_convert_loc (loc,
7814 TREE_TYPE (op0), arg02));
7815 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7816 arg01, arg02);
7817
7818 /* If this was a conversion, and all we did was to move it
7819 inside the COND_EXPR, bring it back out. But leave it if
7820 it is a conversion from integer to integer and the
7821 result precision is no wider than a word since such a
7822 conversion is cheap and may be optimized away by combine,
7823 while it couldn't if it were outside the COND_EXPR. Then return
7824 so we don't get into an infinite recursion loop taking the
7825 conversion out and then back in. */
7826
7827 if ((CONVERT_EXPR_CODE_P (code)
7828 || code == NON_LVALUE_EXPR)
7829 && TREE_CODE (tem) == COND_EXPR
7830 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7831 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7832 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7833 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7834 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7835 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7836 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7837 && (INTEGRAL_TYPE_P
7838 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7839 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7840 || flag_syntax_only))
7841 tem = build1_loc (loc, code, type,
7842 build3 (COND_EXPR,
7843 TREE_TYPE (TREE_OPERAND
7844 (TREE_OPERAND (tem, 1), 0)),
7845 TREE_OPERAND (tem, 0),
7846 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7847 TREE_OPERAND (TREE_OPERAND (tem, 2),
7848 0)));
7849 return tem;
7850 }
7851 }
7852
7853 switch (code)
7854 {
7855 case PAREN_EXPR:
7856 /* Re-association barriers around constants and other re-association
7857 barriers can be removed. */
7858 if (CONSTANT_CLASS_P (op0)
7859 || TREE_CODE (op0) == PAREN_EXPR)
7860 return fold_convert_loc (loc, type, op0);
7861 return NULL_TREE;
7862
7863 CASE_CONVERT:
7864 case FLOAT_EXPR:
7865 case FIX_TRUNC_EXPR:
7866 if (TREE_TYPE (op0) == type)
7867 return op0;
7868
7869 if (COMPARISON_CLASS_P (op0))
7870 {
7871 /* If we have (type) (a CMP b) and type is an integral type, return
7872 new expression involving the new type. Canonicalize
7873 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7874 non-integral type.
7875 Do not fold the result as that would not simplify further, also
7876 folding again results in recursions. */
7877 if (TREE_CODE (type) == BOOLEAN_TYPE)
7878 return build2_loc (loc, TREE_CODE (op0), type,
7879 TREE_OPERAND (op0, 0),
7880 TREE_OPERAND (op0, 1));
7881 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7882 && TREE_CODE (type) != VECTOR_TYPE)
7883 return build3_loc (loc, COND_EXPR, type, op0,
7884 constant_boolean_node (true, type),
7885 constant_boolean_node (false, type));
7886 }
7887
7888 /* Handle cases of two conversions in a row. */
7889 if (CONVERT_EXPR_P (op0))
7890 {
7891 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7892 tree inter_type = TREE_TYPE (op0);
7893 int inside_int = INTEGRAL_TYPE_P (inside_type);
7894 int inside_ptr = POINTER_TYPE_P (inside_type);
7895 int inside_float = FLOAT_TYPE_P (inside_type);
7896 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7897 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7898 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7899 int inter_int = INTEGRAL_TYPE_P (inter_type);
7900 int inter_ptr = POINTER_TYPE_P (inter_type);
7901 int inter_float = FLOAT_TYPE_P (inter_type);
7902 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7903 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7904 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7905 int final_int = INTEGRAL_TYPE_P (type);
7906 int final_ptr = POINTER_TYPE_P (type);
7907 int final_float = FLOAT_TYPE_P (type);
7908 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7909 unsigned int final_prec = TYPE_PRECISION (type);
7910 int final_unsignedp = TYPE_UNSIGNED (type);
7911
7912 /* In addition to the cases of two conversions in a row
7913 handled below, if we are converting something to its own
7914 type via an object of identical or wider precision, neither
7915 conversion is needed. */
7916 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7917 && (((inter_int || inter_ptr) && final_int)
7918 || (inter_float && final_float))
7919 && inter_prec >= final_prec)
7920 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7921
7922 /* Likewise, if the intermediate and initial types are either both
7923 float or both integer, we don't need the middle conversion if the
7924 former is wider than the latter and doesn't change the signedness
7925 (for integers). Avoid this if the final type is a pointer since
7926 then we sometimes need the middle conversion. Likewise if the
7927 final type has a precision not equal to the size of its mode. */
7928 if (((inter_int && inside_int)
7929 || (inter_float && inside_float)
7930 || (inter_vec && inside_vec))
7931 && inter_prec >= inside_prec
7932 && (inter_float || inter_vec
7933 || inter_unsignedp == inside_unsignedp)
7934 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7935 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7936 && ! final_ptr
7937 && (! final_vec || inter_prec == inside_prec))
7938 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7939
7940 /* If we have a sign-extension of a zero-extended value, we can
7941 replace that by a single zero-extension. Likewise if the
7942 final conversion does not change precision we can drop the
7943 intermediate conversion. */
7944 if (inside_int && inter_int && final_int
7945 && ((inside_prec < inter_prec && inter_prec < final_prec
7946 && inside_unsignedp && !inter_unsignedp)
7947 || final_prec == inter_prec))
7948 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7949
7950 /* Two conversions in a row are not needed unless:
7951 - some conversion is floating-point (overstrict for now), or
7952 - some conversion is a vector (overstrict for now), or
7953 - the intermediate type is narrower than both initial and
7954 final, or
7955 - the intermediate type and innermost type differ in signedness,
7956 and the outermost type is wider than the intermediate, or
7957 - the initial type is a pointer type and the precisions of the
7958 intermediate and final types differ, or
7959 - the final type is a pointer type and the precisions of the
7960 initial and intermediate types differ. */
7961 if (! inside_float && ! inter_float && ! final_float
7962 && ! inside_vec && ! inter_vec && ! final_vec
7963 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7964 && ! (inside_int && inter_int
7965 && inter_unsignedp != inside_unsignedp
7966 && inter_prec < final_prec)
7967 && ((inter_unsignedp && inter_prec > inside_prec)
7968 == (final_unsignedp && final_prec > inter_prec))
7969 && ! (inside_ptr && inter_prec != final_prec)
7970 && ! (final_ptr && inside_prec != inter_prec)
7971 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7972 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7973 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7974 }
7975
7976 /* Handle (T *)&A.B.C for A being of type T and B and C
7977 living at offset zero. This occurs frequently in
7978 C++ upcasting and then accessing the base. */
7979 if (TREE_CODE (op0) == ADDR_EXPR
7980 && POINTER_TYPE_P (type)
7981 && handled_component_p (TREE_OPERAND (op0, 0)))
7982 {
7983 HOST_WIDE_INT bitsize, bitpos;
7984 tree offset;
7985 enum machine_mode mode;
7986 int unsignedp, volatilep;
7987 tree base = TREE_OPERAND (op0, 0);
7988 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7989 &mode, &unsignedp, &volatilep, false);
7990 /* If the reference was to a (constant) zero offset, we can use
7991 the address of the base if it has the same base type
7992 as the result type and the pointer type is unqualified. */
7993 if (! offset && bitpos == 0
7994 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7995 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7996 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7997 return fold_convert_loc (loc, type,
7998 build_fold_addr_expr_loc (loc, base));
7999 }
8000
8001 if (TREE_CODE (op0) == MODIFY_EXPR
8002 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8003 /* Detect assigning a bitfield. */
8004 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8005 && DECL_BIT_FIELD
8006 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8007 {
8008 /* Don't leave an assignment inside a conversion
8009 unless assigning a bitfield. */
8010 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8011 /* First do the assignment, then return converted constant. */
8012 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8013 TREE_NO_WARNING (tem) = 1;
8014 TREE_USED (tem) = 1;
8015 return tem;
8016 }
8017
8018 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8019 constants (if x has signed type, the sign bit cannot be set
8020 in c). This folds extension into the BIT_AND_EXPR.
8021 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8022 very likely don't have maximal range for their precision and this
8023 transformation effectively doesn't preserve non-maximal ranges. */
8024 if (TREE_CODE (type) == INTEGER_TYPE
8025 && TREE_CODE (op0) == BIT_AND_EXPR
8026 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8027 {
8028 tree and_expr = op0;
8029 tree and0 = TREE_OPERAND (and_expr, 0);
8030 tree and1 = TREE_OPERAND (and_expr, 1);
8031 int change = 0;
8032
8033 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8034 || (TYPE_PRECISION (type)
8035 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8036 change = 1;
8037 else if (TYPE_PRECISION (TREE_TYPE (and1))
8038 <= HOST_BITS_PER_WIDE_INT
8039 && tree_fits_uhwi_p (and1))
8040 {
8041 unsigned HOST_WIDE_INT cst;
8042
8043 cst = tree_to_uhwi (and1);
8044 cst &= HOST_WIDE_INT_M1U
8045 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8046 change = (cst == 0);
8047 #ifdef LOAD_EXTEND_OP
8048 if (change
8049 && !flag_syntax_only
8050 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8051 == ZERO_EXTEND))
8052 {
8053 tree uns = unsigned_type_for (TREE_TYPE (and0));
8054 and0 = fold_convert_loc (loc, uns, and0);
8055 and1 = fold_convert_loc (loc, uns, and1);
8056 }
8057 #endif
8058 }
8059 if (change)
8060 {
8061 tem = force_fit_type (type, wi::to_widest (and1), 0,
8062 TREE_OVERFLOW (and1));
8063 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8064 fold_convert_loc (loc, type, and0), tem);
8065 }
8066 }
8067
8068 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8069 when one of the new casts will fold away. Conservatively we assume
8070 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8071 if (POINTER_TYPE_P (type)
8072 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8073 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8074 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8075 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8076 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8077 {
8078 tree arg00 = TREE_OPERAND (arg0, 0);
8079 tree arg01 = TREE_OPERAND (arg0, 1);
8080
8081 return fold_build_pointer_plus_loc
8082 (loc, fold_convert_loc (loc, type, arg00), arg01);
8083 }
8084
8085 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8086 of the same precision, and X is of an integer type not narrower
8087 than T1 or T2, i.e. the cast (T2)X isn't an extension. */
8088 if (INTEGRAL_TYPE_P (type)
8089 && TREE_CODE (op0) == BIT_NOT_EXPR
8090 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8091 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8092 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8093 {
8094 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8095 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8096 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8097 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8098 fold_convert_loc (loc, type, tem));
8099 }
8100
8101 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8102 type of X and Y (integer types only). */
8103 if (INTEGRAL_TYPE_P (type)
8104 && TREE_CODE (op0) == MULT_EXPR
8105 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8106 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8107 {
8108 /* Be careful not to introduce new overflows. */
8109 tree mult_type;
8110 if (TYPE_OVERFLOW_WRAPS (type))
8111 mult_type = type;
8112 else
8113 mult_type = unsigned_type_for (type);
8114
8115 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8116 {
8117 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8118 fold_convert_loc (loc, mult_type,
8119 TREE_OPERAND (op0, 0)),
8120 fold_convert_loc (loc, mult_type,
8121 TREE_OPERAND (op0, 1)));
8122 return fold_convert_loc (loc, type, tem);
8123 }
8124 }
8125
8126 tem = fold_convert_const (code, type, op0);
8127 return tem ? tem : NULL_TREE;
8128
8129 case ADDR_SPACE_CONVERT_EXPR:
8130 if (integer_zerop (arg0))
8131 return fold_convert_const (code, type, arg0);
8132 return NULL_TREE;
8133
8134 case FIXED_CONVERT_EXPR:
8135 tem = fold_convert_const (code, type, arg0);
8136 return tem ? tem : NULL_TREE;
8137
8138 case VIEW_CONVERT_EXPR:
8139 if (TREE_TYPE (op0) == type)
8140 return op0;
8141 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8142 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8143 type, TREE_OPERAND (op0, 0));
8144 if (TREE_CODE (op0) == MEM_REF)
8145 return fold_build2_loc (loc, MEM_REF, type,
8146 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8147
8148 /* For integral conversions with the same precision or pointer
8149 conversions use a NOP_EXPR instead. */
8150 if ((INTEGRAL_TYPE_P (type)
8151 || POINTER_TYPE_P (type))
8152 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8153 || POINTER_TYPE_P (TREE_TYPE (op0)))
8154 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8155 return fold_convert_loc (loc, type, op0);
8156
8157 /* Strip inner integral conversions that do not change the precision. */
8158 if (CONVERT_EXPR_P (op0)
8159 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8160 || POINTER_TYPE_P (TREE_TYPE (op0)))
8161 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8162 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8163 && (TYPE_PRECISION (TREE_TYPE (op0))
8164 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8165 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8166 type, TREE_OPERAND (op0, 0));
8167
8168 return fold_view_convert_expr (type, op0);
8169
8170 case NEGATE_EXPR:
8171 tem = fold_negate_expr (loc, arg0);
8172 if (tem)
8173 return fold_convert_loc (loc, type, tem);
8174 return NULL_TREE;
8175
8176 case ABS_EXPR:
8177 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8178 return fold_abs_const (arg0, type);
8179 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8180 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8181 /* Convert fabs((double)float) into (double)fabsf(float). */
8182 else if (TREE_CODE (arg0) == NOP_EXPR
8183 && TREE_CODE (type) == REAL_TYPE)
8184 {
8185 tree targ0 = strip_float_extensions (arg0);
8186 if (targ0 != arg0)
8187 return fold_convert_loc (loc, type,
8188 fold_build1_loc (loc, ABS_EXPR,
8189 TREE_TYPE (targ0),
8190 targ0));
8191 }
8192 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8193 else if (TREE_CODE (arg0) == ABS_EXPR)
8194 return arg0;
8195 else if (tree_expr_nonnegative_p (arg0))
8196 return arg0;
8197
8198 /* Strip sign ops from argument. */
8199 if (TREE_CODE (type) == REAL_TYPE)
8200 {
8201 tem = fold_strip_sign_ops (arg0);
8202 if (tem)
8203 return fold_build1_loc (loc, ABS_EXPR, type,
8204 fold_convert_loc (loc, type, tem));
8205 }
8206 return NULL_TREE;
8207
8208 case CONJ_EXPR:
8209 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8210 return fold_convert_loc (loc, type, arg0);
8211 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8212 {
8213 tree itype = TREE_TYPE (type);
8214 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8215 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8216 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8217 negate_expr (ipart));
8218 }
8219 if (TREE_CODE (arg0) == COMPLEX_CST)
8220 {
8221 tree itype = TREE_TYPE (type);
8222 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8223 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8224 return build_complex (type, rpart, negate_expr (ipart));
8225 }
8226 if (TREE_CODE (arg0) == CONJ_EXPR)
8227 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8228 return NULL_TREE;
8229
8230 case BIT_NOT_EXPR:
8231 if (TREE_CODE (arg0) == INTEGER_CST)
8232 return fold_not_const (arg0, type);
8233 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8234 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8235 /* Convert ~ (-A) to A - 1. */
8236 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8237 return fold_build2_loc (loc, MINUS_EXPR, type,
8238 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8239 build_int_cst (type, 1));
8240 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8241 else if (INTEGRAL_TYPE_P (type)
8242 && ((TREE_CODE (arg0) == MINUS_EXPR
8243 && integer_onep (TREE_OPERAND (arg0, 1)))
8244 || (TREE_CODE (arg0) == PLUS_EXPR
8245 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8246 return fold_build1_loc (loc, NEGATE_EXPR, type,
8247 fold_convert_loc (loc, type,
8248 TREE_OPERAND (arg0, 0)));
8249 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8250 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8251 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8252 fold_convert_loc (loc, type,
8253 TREE_OPERAND (arg0, 0)))))
8254 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8255 fold_convert_loc (loc, type,
8256 TREE_OPERAND (arg0, 1)));
8257 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8258 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8259 fold_convert_loc (loc, type,
8260 TREE_OPERAND (arg0, 1)))))
8261 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8262 fold_convert_loc (loc, type,
8263 TREE_OPERAND (arg0, 0)), tem);
8264 /* Perform BIT_NOT_EXPR on each element individually. */
8265 else if (TREE_CODE (arg0) == VECTOR_CST)
8266 {
8267 tree *elements;
8268 tree elem;
8269 unsigned count = VECTOR_CST_NELTS (arg0), i;
8270
8271 elements = XALLOCAVEC (tree, count);
8272 for (i = 0; i < count; i++)
8273 {
8274 elem = VECTOR_CST_ELT (arg0, i);
8275 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8276 if (elem == NULL_TREE)
8277 break;
8278 elements[i] = elem;
8279 }
8280 if (i == count)
8281 return build_vector (type, elements);
8282 }
8283 else if (COMPARISON_CLASS_P (arg0)
8284 && (VECTOR_TYPE_P (type)
8285 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8286 {
8287 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8288 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8289 HONOR_NANS (TYPE_MODE (op_type)));
8290 if (subcode != ERROR_MARK)
8291 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8292 TREE_OPERAND (arg0, 1));
8293 }
8294
8295
8296 return NULL_TREE;
8297
8298 case TRUTH_NOT_EXPR:
8299 /* Note that the operand of this must be an int
8300 and its values must be 0 or 1.
8301 ("true" is a fixed value perhaps depending on the language,
8302 but we don't handle values other than 1 correctly yet.) */
8303 tem = fold_truth_not_expr (loc, arg0);
8304 if (!tem)
8305 return NULL_TREE;
8306 return fold_convert_loc (loc, type, tem);
8307
8308 case REALPART_EXPR:
8309 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8310 return fold_convert_loc (loc, type, arg0);
8311 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8312 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8313 TREE_OPERAND (arg0, 1));
8314 if (TREE_CODE (arg0) == COMPLEX_CST)
8315 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8316 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8317 {
8318 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8319 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8320 fold_build1_loc (loc, REALPART_EXPR, itype,
8321 TREE_OPERAND (arg0, 0)),
8322 fold_build1_loc (loc, REALPART_EXPR, itype,
8323 TREE_OPERAND (arg0, 1)));
8324 return fold_convert_loc (loc, type, tem);
8325 }
8326 if (TREE_CODE (arg0) == CONJ_EXPR)
8327 {
8328 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8329 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8330 TREE_OPERAND (arg0, 0));
8331 return fold_convert_loc (loc, type, tem);
8332 }
8333 if (TREE_CODE (arg0) == CALL_EXPR)
8334 {
8335 tree fn = get_callee_fndecl (arg0);
8336 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8337 switch (DECL_FUNCTION_CODE (fn))
8338 {
8339 CASE_FLT_FN (BUILT_IN_CEXPI):
8340 fn = mathfn_built_in (type, BUILT_IN_COS);
8341 if (fn)
8342 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8343 break;
8344
8345 default:
8346 break;
8347 }
8348 }
8349 return NULL_TREE;
8350
8351 case IMAGPART_EXPR:
8352 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8353 return build_zero_cst (type);
8354 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8355 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8356 TREE_OPERAND (arg0, 0));
8357 if (TREE_CODE (arg0) == COMPLEX_CST)
8358 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8359 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8360 {
8361 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8362 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8363 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8364 TREE_OPERAND (arg0, 0)),
8365 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8366 TREE_OPERAND (arg0, 1)));
8367 return fold_convert_loc (loc, type, tem);
8368 }
8369 if (TREE_CODE (arg0) == CONJ_EXPR)
8370 {
8371 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8372 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8373 return fold_convert_loc (loc, type, negate_expr (tem));
8374 }
8375 if (TREE_CODE (arg0) == CALL_EXPR)
8376 {
8377 tree fn = get_callee_fndecl (arg0);
8378 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8379 switch (DECL_FUNCTION_CODE (fn))
8380 {
8381 CASE_FLT_FN (BUILT_IN_CEXPI):
8382 fn = mathfn_built_in (type, BUILT_IN_SIN);
8383 if (fn)
8384 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8385 break;
8386
8387 default:
8388 break;
8389 }
8390 }
8391 return NULL_TREE;
8392
8393 case INDIRECT_REF:
8394 /* Fold *&X to X if X is an lvalue. */
8395 if (TREE_CODE (op0) == ADDR_EXPR)
8396 {
8397 tree op00 = TREE_OPERAND (op0, 0);
8398 if ((TREE_CODE (op00) == VAR_DECL
8399 || TREE_CODE (op00) == PARM_DECL
8400 || TREE_CODE (op00) == RESULT_DECL)
8401 && !TREE_READONLY (op00))
8402 return op00;
8403 }
8404 return NULL_TREE;
8405
8406 case VEC_UNPACK_LO_EXPR:
8407 case VEC_UNPACK_HI_EXPR:
8408 case VEC_UNPACK_FLOAT_LO_EXPR:
8409 case VEC_UNPACK_FLOAT_HI_EXPR:
8410 {
8411 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8412 tree *elts;
8413 enum tree_code subcode;
8414
8415 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8416 if (TREE_CODE (arg0) != VECTOR_CST)
8417 return NULL_TREE;
8418
8419 elts = XALLOCAVEC (tree, nelts * 2);
8420 if (!vec_cst_ctor_to_array (arg0, elts))
8421 return NULL_TREE;
8422
8423 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8424 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8425 elts += nelts;
8426
8427 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8428 subcode = NOP_EXPR;
8429 else
8430 subcode = FLOAT_EXPR;
8431
8432 for (i = 0; i < nelts; i++)
8433 {
8434 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8435 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8436 return NULL_TREE;
8437 }
8438
8439 return build_vector (type, elts);
8440 }
8441
8442 case REDUC_MIN_EXPR:
8443 case REDUC_MAX_EXPR:
8444 case REDUC_PLUS_EXPR:
8445 {
8446 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8447 tree *elts;
8448 enum tree_code subcode;
8449
8450 if (TREE_CODE (op0) != VECTOR_CST)
8451 return NULL_TREE;
8452
8453 elts = XALLOCAVEC (tree, nelts);
8454 if (!vec_cst_ctor_to_array (op0, elts))
8455 return NULL_TREE;
8456
8457 switch (code)
8458 {
8459 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8460 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8461 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8462 default: gcc_unreachable ();
8463 }
8464
8465 for (i = 1; i < nelts; i++)
8466 {
8467 elts[0] = const_binop (subcode, elts[0], elts[i]);
8468 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8469 return NULL_TREE;
8470 elts[i] = build_zero_cst (TREE_TYPE (type));
8471 }
8472
8473 return build_vector (type, elts);
8474 }
8475
8476 default:
8477 return NULL_TREE;
8478 } /* switch (code) */
8479 }
8480
8481
8482 /* If the operation was a conversion do _not_ mark a resulting constant
8483 with TREE_OVERFLOW if the original constant was not. These conversions
8484 have implementation defined behavior and retaining the TREE_OVERFLOW
8485 flag here would confuse later passes such as VRP. */
8486 tree
8487 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8488 tree type, tree op0)
8489 {
8490 tree res = fold_unary_loc (loc, code, type, op0);
8491 if (res
8492 && TREE_CODE (res) == INTEGER_CST
8493 && TREE_CODE (op0) == INTEGER_CST
8494 && CONVERT_EXPR_CODE_P (code))
8495 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8496
8497 return res;
8498 }
8499
8500 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8501 operands OP0 and OP1. LOC is the location of the resulting expression.
8502 ARG0 and ARG1 are OP0 and OP1 with their conversions (NOPs) stripped.
8503 Return the folded expression if folding is successful. Otherwise,
8504 return NULL_TREE. */
8505 static tree
8506 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8507 tree arg0, tree arg1, tree op0, tree op1)
8508 {
8509 tree tem;
8510
8511 /* We only do these simplifications if we are optimizing. */
8512 if (!optimize)
8513 return NULL_TREE;
8514
8515 /* Check for things like (A || B) && (A || C). We can convert this
8516 to A || (B && C). Note that either operator can be any of the four
8517 truth and/or operations and the transformation will still be
8518 valid. Also note that we only care about order for the
8519 ANDIF and ORIF operators. If B contains side effects, this
8520 might change the truth-value of A. */
8521 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8522 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8523 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8524 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8525 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8526 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8527 {
8528 tree a00 = TREE_OPERAND (arg0, 0);
8529 tree a01 = TREE_OPERAND (arg0, 1);
8530 tree a10 = TREE_OPERAND (arg1, 0);
8531 tree a11 = TREE_OPERAND (arg1, 1);
8532 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8533 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8534 && (code == TRUTH_AND_EXPR
8535 || code == TRUTH_OR_EXPR));
8536
8537 if (operand_equal_p (a00, a10, 0))
8538 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8539 fold_build2_loc (loc, code, type, a01, a11));
8540 else if (commutative && operand_equal_p (a00, a11, 0))
8541 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8542 fold_build2_loc (loc, code, type, a01, a10));
8543 else if (commutative && operand_equal_p (a01, a10, 0))
8544 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8545 fold_build2_loc (loc, code, type, a00, a11));
8546
8547 /* This case is tricky because we must either have commutative
8548 operators or else A10 must not have side-effects. */
8549
8550 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8551 && operand_equal_p (a01, a11, 0))
8552 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8553 fold_build2_loc (loc, code, type, a00, a10),
8554 a01);
8555 }
8556
8557 /* See if we can build a range comparison. */
8558 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8559 return tem;
8560
8561 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8562 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8563 {
8564 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8565 if (tem)
8566 return fold_build2_loc (loc, code, type, tem, arg1);
8567 }
8568
8569 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8570 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8571 {
8572 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8573 if (tem)
8574 return fold_build2_loc (loc, code, type, arg0, tem);
8575 }
8576
8577 /* Check for the possibility of merging component references. If our
8578 lhs is another similar operation, try to merge its rhs with our
8579 rhs. Then try to merge our lhs and rhs. */
8580 if (TREE_CODE (arg0) == code
8581 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8582 TREE_OPERAND (arg0, 1), arg1)))
8583 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8584
8585 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8586 return tem;
8587
8588 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8589 && (code == TRUTH_AND_EXPR
8590 || code == TRUTH_ANDIF_EXPR
8591 || code == TRUTH_OR_EXPR
8592 || code == TRUTH_ORIF_EXPR))
8593 {
8594 enum tree_code ncode, icode;
8595
8596 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8597 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8598 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8599
8600 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8601 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8602 We don't want to pack more than two leaves into a non-IF AND/OR
8603 expression.
8604 If the tree code of the left-hand operand isn't an AND/OR-IF code
8605 and isn't equal to IF-CODE, then we don't want to add the
8606 right-hand operand. If the inner right-hand side of the
8607 left-hand operand has side effects, or isn't simple, then we
8608 can't add to it, as otherwise we might destroy the if-sequence. */
8609 if (TREE_CODE (arg0) == icode
8610 && simple_operand_p_2 (arg1)
8611 /* Needed for sequence points to handle traps and
8612 side effects. */
8613 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8614 {
8615 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8616 arg1);
8617 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8618 tem);
8619 }
8620 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8621 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8622 else if (TREE_CODE (arg1) == icode
8623 && simple_operand_p_2 (arg0)
8624 /* Needed for sequence points to handle traps and
8625 side effects. */
8626 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8627 {
8628 tem = fold_build2_loc (loc, ncode, type,
8629 arg0, TREE_OPERAND (arg1, 0));
8630 return fold_build2_loc (loc, icode, type, tem,
8631 TREE_OPERAND (arg1, 1));
8632 }
8633 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8634 into (A OR B).
8635 For sequence point consistency, we need to check for traps
8636 and side effects. */
8637 else if (code == icode && simple_operand_p_2 (arg0)
8638 && simple_operand_p_2 (arg1))
8639 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8640 }
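/* Editorial sketch of the three repackings above, writing && for the
   short-circuit TRUTH_ANDIF_EXPR and & for the non-short-circuit
   TRUTH_AND_EXPR on 0/1 values (illustration only):

     (a && b) & c   -->  a && (b & c)
     a & (b && c)   -->  (a & b) && c
     a && b         -->  a & b

   simple_operand_p_2 guards each form so that no trapping or
   side-effecting operand loses its conditional evaluation.  */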
8641
8642 return NULL_TREE;
8643 }
8644
8645 /* Fold a binary expression of code CODE and type TYPE with operands
8646 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8647 Return the folded expression if folding is successful. Otherwise,
8648 return NULL_TREE. */
8649
8650 static tree
8651 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8652 {
8653 enum tree_code compl_code;
8654
8655 if (code == MIN_EXPR)
8656 compl_code = MAX_EXPR;
8657 else if (code == MAX_EXPR)
8658 compl_code = MIN_EXPR;
8659 else
8660 gcc_unreachable ();
8661
8662 /* MIN (MAX (a, b), b) == b. */
8663 if (TREE_CODE (op0) == compl_code
8664 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8665 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8666
8667 /* MIN (MAX (b, a), b) == b. */
8668 if (TREE_CODE (op0) == compl_code
8669 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8670 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8671 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8672
8673 /* MIN (a, MAX (a, b)) == a. */
8674 if (TREE_CODE (op1) == compl_code
8675 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8676 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8677 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8678
8679 /* MIN (a, MAX (b, a)) == a. */
8680 if (TREE_CODE (op1) == compl_code
8681 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8682 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8683 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8684
8685 return NULL_TREE;
8686 }
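/* Editorial examples of the four identities above, with code == MIN_EXPR
   and compl_code == MAX_EXPR:

     MIN (MAX (a, b), b) == b      MIN (MAX (b, a), b) == b
     MIN (a, MAX (a, b)) == a      MIN (a, MAX (b, a)) == a

   The MAX/MIN case is symmetric, which is why only COMPL_CODE needs
   to be matched.  */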
8687
8688 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8689 by changing CODE to reduce the magnitude of constants involved in
8690 ARG0 of the comparison.
8691 Returns a canonicalized comparison tree if a simplification was
8692 possible, otherwise returns NULL_TREE.
8693 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8694 valid if signed overflow is undefined. */
8695
8696 static tree
8697 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8698 tree arg0, tree arg1,
8699 bool *strict_overflow_p)
8700 {
8701 enum tree_code code0 = TREE_CODE (arg0);
8702 tree t, cst0 = NULL_TREE;
8703 int sgn0;
8704 bool swap = false;
8705
8706 /* Match A +- CST code arg1 and CST code arg1. We can change the
8707 first form only if overflow is undefined. */
8708 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8709 /* In principle pointers also have undefined overflow behavior,
8710 but that causes problems elsewhere. */
8711 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8712 && (code0 == MINUS_EXPR
8713 || code0 == PLUS_EXPR)
8714 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8715 || code0 == INTEGER_CST))
8716 return NULL_TREE;
8717
8718 /* Identify the constant in arg0 and its sign. */
8719 if (code0 == INTEGER_CST)
8720 cst0 = arg0;
8721 else
8722 cst0 = TREE_OPERAND (arg0, 1);
8723 sgn0 = tree_int_cst_sgn (cst0);
8724
8725 /* Overflowed constants and zero will cause problems. */
8726 if (integer_zerop (cst0)
8727 || TREE_OVERFLOW (cst0))
8728 return NULL_TREE;
8729
8730 /* See if we can reduce the magnitude of the constant in
8731 arg0 by changing the comparison code. */
8732 if (code0 == INTEGER_CST)
8733 {
8734 /* CST <= arg1 -> CST-1 < arg1. */
8735 if (code == LE_EXPR && sgn0 == 1)
8736 code = LT_EXPR;
8737 /* -CST < arg1 -> -CST-1 <= arg1. */
8738 else if (code == LT_EXPR && sgn0 == -1)
8739 code = LE_EXPR;
8740 /* CST > arg1 -> CST-1 >= arg1. */
8741 else if (code == GT_EXPR && sgn0 == 1)
8742 code = GE_EXPR;
8743 /* -CST >= arg1 -> -CST-1 > arg1. */
8744 else if (code == GE_EXPR && sgn0 == -1)
8745 code = GT_EXPR;
8746 else
8747 return NULL_TREE;
8748 /* arg1 code' CST' might be more canonical. */
8749 swap = true;
8750 }
8751 else
8752 {
8753 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8754 if (code == LT_EXPR
8755 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8756 code = LE_EXPR;
8757 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8758 else if (code == GT_EXPR
8759 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8760 code = GE_EXPR;
8761 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8762 else if (code == LE_EXPR
8763 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8764 code = LT_EXPR;
8765 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8766 else if (code == GE_EXPR
8767 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8768 code = GT_EXPR;
8769 else
8770 return NULL_TREE;
8771 *strict_overflow_p = true;
8772 }
8773
8774 /* Now build the constant reduced in magnitude. But not if that
8775 would produce one outside of its type's range. */
8776 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8777 && ((sgn0 == 1
8778 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8779 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8780 || (sgn0 == -1
8781 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8782 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8783 /* We cannot swap the comparison here as that would cause us to
8784 endlessly recurse. */
8785 return NULL_TREE;
8786
8787 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8788 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8789 if (code0 != INTEGER_CST)
8790 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8791 t = fold_convert (TREE_TYPE (arg1), t);
8792
8793 /* If swapping might yield a more canonical form, do so. */
8794 if (swap)
8795 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8796 else
8797 return fold_build2_loc (loc, code, type, t, arg1);
8798 }
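/* Editorial examples of the reductions above, for signed int x and y:

     10 <= x     -->  x > 9         (CST form, then swapped)
     x - 10 < y  -->  x - 9 <= y    (A - CST form; needs undefined
                                     signed overflow, hence
                                     *STRICT_OVERFLOW_P)

   Each step shrinks the constant by one; the recursive calls to
   fold_build2_loc repeat the reduction until no rule applies.  */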
8799
8800 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8801 overflow further. Try to decrease the magnitude of constants involved
8802 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8803 and put sole constants at the second argument position.
8804 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8805
8806 static tree
8807 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8808 tree arg0, tree arg1)
8809 {
8810 tree t;
8811 bool strict_overflow_p;
8812 const char * const warnmsg = G_("assuming signed overflow does not occur "
8813 "when reducing constant in comparison");
8814
8815 /* Try canonicalization by simplifying arg0. */
8816 strict_overflow_p = false;
8817 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8818 &strict_overflow_p);
8819 if (t)
8820 {
8821 if (strict_overflow_p)
8822 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8823 return t;
8824 }
8825
8826 /* Try canonicalization by simplifying arg1 using the swapped
8827 comparison. */
8828 code = swap_tree_comparison (code);
8829 strict_overflow_p = false;
8830 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8831 &strict_overflow_p);
8832 if (t && strict_overflow_p)
8833 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8834 return t;
8835 }
8836
8837 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8838 space. This is used to avoid issuing overflow warnings for
8839 expressions like &p->x which cannot wrap. */
8840
8841 static bool
8842 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8843 {
8844 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8845 return true;
8846
8847 if (bitpos < 0)
8848 return true;
8849
8850 wide_int wi_offset;
8851 int precision = TYPE_PRECISION (TREE_TYPE (base));
8852 if (offset == NULL_TREE)
8853 wi_offset = wi::zero (precision);
8854 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8855 return true;
8856 else
8857 wi_offset = offset;
8858
8859 bool overflow;
8860 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8861 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8862 if (overflow)
8863 return true;
8864
8865 if (!wi::fits_uhwi_p (total))
8866 return true;
8867
8868 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8869 if (size <= 0)
8870 return true;
8871
8872 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8873 array. */
8874 if (TREE_CODE (base) == ADDR_EXPR)
8875 {
8876 HOST_WIDE_INT base_size;
8877
8878 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8879 if (base_size > 0 && size < base_size)
8880 size = base_size;
8881 }
8882
8883 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8884 }
8885
8886 /* Return the HOST_WIDE_INT least significant bits of T, an INTEGER_CST
8887 of sizetype kind. This makes sure to properly sign-extend the
8888 constant. */
8889
8890 static HOST_WIDE_INT
8891 size_low_cst (const_tree t)
8892 {
8893 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8894 int prec = TYPE_PRECISION (TREE_TYPE (t));
8895 if (prec < HOST_BITS_PER_WIDE_INT)
8896 return sext_hwi (w, prec);
8897 return w;
8898 }
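#if 0
/* Editorial sketch, not built: the effect of the sign extension above
   on plain integers, assuming a 64-bit two's complement HOST_WIDE_INT.
   For example sext_hwi_sketch (0xff, 8) == -1.  */
static long long
sext_hwi_sketch (long long w, int prec)
{
  unsigned long long mask = prec >= 64 ? ~0ULL : (1ULL << prec) - 1;
  unsigned long long sign = 1ULL << (prec - 1);
  unsigned long long low = (unsigned long long) w & mask;
  /* Subtracting the sign bit after toggling it fills the high bits
     with copies of bit PREC-1.  */
  return (long long) ((low ^ sign) - sign);
}
#endif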
8899
8900 /* Subroutine of fold_binary. This routine performs all of the
8901 transformations that are common to the equality/inequality
8902 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8903 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8904 fold_binary should call fold_binary rather than this function. Fold a comparison with
8905 tree code CODE and type TYPE with operands OP0 and OP1. Return
8906 the folded comparison or NULL_TREE. */
8907
8908 static tree
8909 fold_comparison (location_t loc, enum tree_code code, tree type,
8910 tree op0, tree op1)
8911 {
8912 tree arg0, arg1, tem;
8913
8914 arg0 = op0;
8915 arg1 = op1;
8916
8917 STRIP_SIGN_NOPS (arg0);
8918 STRIP_SIGN_NOPS (arg1);
8919
8920 tem = fold_relational_const (code, type, arg0, arg1);
8921 if (tem != NULL_TREE)
8922 return tem;
8923
8924 /* If one arg is a real or integer constant, put it last. */
8925 if (tree_swap_operands_p (arg0, arg1, true))
8926 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8927
8928 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8929 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8930 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8931 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8932 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8933 && (TREE_CODE (arg1) == INTEGER_CST
8934 && !TREE_OVERFLOW (arg1)))
8935 {
8936 tree const1 = TREE_OPERAND (arg0, 1);
8937 tree const2 = arg1;
8938 tree variable = TREE_OPERAND (arg0, 0);
8939 tree lhs;
8940 int lhs_add;
8941 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8942
8943 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8944 TREE_TYPE (arg1), const2, const1);
8945
8946 /* If the constant operation overflowed this can be
8947 simplified as a comparison against INT_MAX/INT_MIN. */
8948 if (TREE_CODE (lhs) == INTEGER_CST
8949 && TREE_OVERFLOW (lhs))
8950 {
8951 int const1_sgn = tree_int_cst_sgn (const1);
8952 enum tree_code code2 = code;
8953
8954 /* Get the sign of the constant on the lhs if the
8955 operation were VARIABLE + CONST1. */
8956 if (TREE_CODE (arg0) == MINUS_EXPR)
8957 const1_sgn = -const1_sgn;
8958
8959 /* The sign of the constant determines if we overflowed
8960 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8961 Canonicalize to the INT_MIN overflow by swapping the comparison
8962 if necessary. */
8963 if (const1_sgn == -1)
8964 code2 = swap_tree_comparison (code);
8965
8966 /* We now can look at the canonicalized case
8967 VARIABLE + 1 CODE2 INT_MIN
8968 and decide on the result. */
8969 if (code2 == LT_EXPR
8970 || code2 == LE_EXPR
8971 || code2 == EQ_EXPR)
8972 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8973 else if (code2 == NE_EXPR
8974 || code2 == GE_EXPR
8975 || code2 == GT_EXPR)
8976 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8977 }
8978
8979 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8980 && (TREE_CODE (lhs) != INTEGER_CST
8981 || !TREE_OVERFLOW (lhs)))
8982 {
8983 if (code != EQ_EXPR && code != NE_EXPR)
8984 fold_overflow_warning ("assuming signed overflow does not occur "
8985 "when changing X +- C1 cmp C2 to "
8986 "X cmp C1 +- C2",
8987 WARN_STRICT_OVERFLOW_COMPARISON);
8988 return fold_build2_loc (loc, code, type, variable, lhs);
8989 }
8990 }
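/* Editorial examples of the transform above, for signed int x:

     x + 10 < 30  -->  x < 20          (C2 - C1 folds cleanly)
     x - 5 > 7    -->  x > 12          (lhs_add re-adds C1)
     x + 1 < INT_MIN  -->  false       (constant op overflowed, so the
                                        whole comparison is decided)
*/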
8991
8992 /* For comparisons of pointers we can decompose it to a compile time
8993 comparison of the base objects and the offsets into the object.
8994 This requires at least one operand being an ADDR_EXPR or a
8995 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8996 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8997 && (TREE_CODE (arg0) == ADDR_EXPR
8998 || TREE_CODE (arg1) == ADDR_EXPR
8999 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9000 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9001 {
9002 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9003 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9004 enum machine_mode mode;
9005 int volatilep, unsignedp;
9006 bool indirect_base0 = false, indirect_base1 = false;
9007
9008 /* Get base and offset for the access. Strip ADDR_EXPR for
9009 get_inner_reference, but put it back by stripping INDIRECT_REF
9010 off the base object if possible. indirect_baseN will be true
9011 if baseN is not an address but refers to the object itself. */
9012 base0 = arg0;
9013 if (TREE_CODE (arg0) == ADDR_EXPR)
9014 {
9015 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9016 &bitsize, &bitpos0, &offset0, &mode,
9017 &unsignedp, &volatilep, false);
9018 if (TREE_CODE (base0) == INDIRECT_REF)
9019 base0 = TREE_OPERAND (base0, 0);
9020 else
9021 indirect_base0 = true;
9022 }
9023 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9024 {
9025 base0 = TREE_OPERAND (arg0, 0);
9026 STRIP_SIGN_NOPS (base0);
9027 if (TREE_CODE (base0) == ADDR_EXPR)
9028 {
9029 base0 = TREE_OPERAND (base0, 0);
9030 indirect_base0 = true;
9031 }
9032 offset0 = TREE_OPERAND (arg0, 1);
9033 if (tree_fits_shwi_p (offset0))
9034 {
9035 HOST_WIDE_INT off = size_low_cst (offset0);
9036 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9037 * BITS_PER_UNIT)
9038 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9039 {
9040 bitpos0 = off * BITS_PER_UNIT;
9041 offset0 = NULL_TREE;
9042 }
9043 }
9044 }
9045
9046 base1 = arg1;
9047 if (TREE_CODE (arg1) == ADDR_EXPR)
9048 {
9049 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9050 &bitsize, &bitpos1, &offset1, &mode,
9051 &unsignedp, &volatilep, false);
9052 if (TREE_CODE (base1) == INDIRECT_REF)
9053 base1 = TREE_OPERAND (base1, 0);
9054 else
9055 indirect_base1 = true;
9056 }
9057 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9058 {
9059 base1 = TREE_OPERAND (arg1, 0);
9060 STRIP_SIGN_NOPS (base1);
9061 if (TREE_CODE (base1) == ADDR_EXPR)
9062 {
9063 base1 = TREE_OPERAND (base1, 0);
9064 indirect_base1 = true;
9065 }
9066 offset1 = TREE_OPERAND (arg1, 1);
9067 if (tree_fits_shwi_p (offset1))
9068 {
9069 HOST_WIDE_INT off = size_low_cst (offset1);
9070 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9071 * BITS_PER_UNIT)
9072 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9073 {
9074 bitpos1 = off * BITS_PER_UNIT;
9075 offset1 = NULL_TREE;
9076 }
9077 }
9078 }
9079
9080 /* A local variable can never be pointed to by
9081 the default SSA name of an incoming parameter. */
9082 if ((TREE_CODE (arg0) == ADDR_EXPR
9083 && indirect_base0
9084 && TREE_CODE (base0) == VAR_DECL
9085 && auto_var_in_fn_p (base0, current_function_decl)
9086 && !indirect_base1
9087 && TREE_CODE (base1) == SSA_NAME
9088 && SSA_NAME_IS_DEFAULT_DEF (base1)
9089 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9090 || (TREE_CODE (arg1) == ADDR_EXPR
9091 && indirect_base1
9092 && TREE_CODE (base1) == VAR_DECL
9093 && auto_var_in_fn_p (base1, current_function_decl)
9094 && !indirect_base0
9095 && TREE_CODE (base0) == SSA_NAME
9096 && SSA_NAME_IS_DEFAULT_DEF (base0)
9097 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9098 {
9099 if (code == NE_EXPR)
9100 return constant_boolean_node (1, type);
9101 else if (code == EQ_EXPR)
9102 return constant_boolean_node (0, type);
9103 }
9104 /* If we have equivalent bases we might be able to simplify. */
9105 else if (indirect_base0 == indirect_base1
9106 && operand_equal_p (base0, base1, 0))
9107 {
9108 /* We can fold this expression to a constant if the non-constant
9109 offset parts are equal. */
9110 if ((offset0 == offset1
9111 || (offset0 && offset1
9112 && operand_equal_p (offset0, offset1, 0)))
9113 && (code == EQ_EXPR
9114 || code == NE_EXPR
9115 || (indirect_base0 && DECL_P (base0))
9116 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9117
9118 {
9119 if (code != EQ_EXPR
9120 && code != NE_EXPR
9121 && bitpos0 != bitpos1
9122 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9123 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9124 fold_overflow_warning (("assuming pointer wraparound does not "
9125 "occur when comparing P +- C1 with "
9126 "P +- C2"),
9127 WARN_STRICT_OVERFLOW_CONDITIONAL);
9128
9129 switch (code)
9130 {
9131 case EQ_EXPR:
9132 return constant_boolean_node (bitpos0 == bitpos1, type);
9133 case NE_EXPR:
9134 return constant_boolean_node (bitpos0 != bitpos1, type);
9135 case LT_EXPR:
9136 return constant_boolean_node (bitpos0 < bitpos1, type);
9137 case LE_EXPR:
9138 return constant_boolean_node (bitpos0 <= bitpos1, type);
9139 case GE_EXPR:
9140 return constant_boolean_node (bitpos0 >= bitpos1, type);
9141 case GT_EXPR:
9142 return constant_boolean_node (bitpos0 > bitpos1, type);
9143 default:;
9144 }
9145 }
9146 /* We can simplify the comparison to a comparison of the variable
9147 offset parts if the constant offset parts are equal.
9148 Be careful to use signed sizetype here because otherwise we
9149 mess with array offsets in the wrong way. This is possible
9150 because pointer arithmetic is restricted to remain within an
9151 object and overflow on pointer differences is undefined as of
9152 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9153 else if (bitpos0 == bitpos1
9154 && ((code == EQ_EXPR || code == NE_EXPR)
9155 || (indirect_base0 && DECL_P (base0))
9156 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9157 {
9158 /* By converting to signed sizetype we cover middle-end pointer
9159 arithmetic which operates on unsigned pointer types of size
9160 type size and ARRAY_REF offsets which are properly sign or
9161 zero extended from their type in case it is narrower than
9162 sizetype. */
9163 if (offset0 == NULL_TREE)
9164 offset0 = build_int_cst (ssizetype, 0);
9165 else
9166 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9167 if (offset1 == NULL_TREE)
9168 offset1 = build_int_cst (ssizetype, 0);
9169 else
9170 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9171
9172 if (code != EQ_EXPR
9173 && code != NE_EXPR
9174 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9175 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9176 fold_overflow_warning (("assuming pointer wraparound does not "
9177 "occur when comparing P +- C1 with "
9178 "P +- C2"),
9179 WARN_STRICT_OVERFLOW_COMPARISON);
9180
9181 return fold_build2_loc (loc, code, type, offset0, offset1);
9182 }
9183 }
9184 /* For non-equal bases we can simplify if they are addresses
9185 of local binding decls or constants. */
9186 else if (indirect_base0 && indirect_base1
9187 /* We know that !operand_equal_p (base0, base1, 0)
9188 because the if condition was false. But make
9189 sure two decls are not the same. */
9190 && base0 != base1
9191 && TREE_CODE (arg0) == ADDR_EXPR
9192 && TREE_CODE (arg1) == ADDR_EXPR
9193 && (((TREE_CODE (base0) == VAR_DECL
9194 || TREE_CODE (base0) == PARM_DECL)
9195 && (targetm.binds_local_p (base0)
9196 || CONSTANT_CLASS_P (base1)))
9197 || CONSTANT_CLASS_P (base0))
9198 && (((TREE_CODE (base1) == VAR_DECL
9199 || TREE_CODE (base1) == PARM_DECL)
9200 && (targetm.binds_local_p (base1)
9201 || CONSTANT_CLASS_P (base0)))
9202 || CONSTANT_CLASS_P (base1)))
9203 {
9204 if (code == EQ_EXPR)
9205 return omit_two_operands_loc (loc, type, boolean_false_node,
9206 arg0, arg1);
9207 else if (code == NE_EXPR)
9208 return omit_two_operands_loc (loc, type, boolean_true_node,
9209 arg0, arg1);
9210 }
9211 /* For equal offsets we can simplify to a comparison of the
9212 base addresses. */
9213 else if (bitpos0 == bitpos1
9214 && (indirect_base0
9215 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9216 && (indirect_base1
9217 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9218 && ((offset0 == offset1)
9219 || (offset0 && offset1
9220 && operand_equal_p (offset0, offset1, 0))))
9221 {
9222 if (indirect_base0)
9223 base0 = build_fold_addr_expr_loc (loc, base0);
9224 if (indirect_base1)
9225 base1 = build_fold_addr_expr_loc (loc, base1);
9226 return fold_build2_loc (loc, code, type, base0, base1);
9227 }
9228 }
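/* Editorial examples of the pointer decomposition above, given
   "struct S { int a, b; } s; int v[8];" as function-local objects:

     &s.a == &s.b   -->  false        (same base, different bitpos)
     &v[i] == &v[j] -->  compare the variable offsets in ssizetype
     &s.a == &v[0]  -->  false        (distinct locally bound decls)
*/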
9229
9230 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9231 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9232 the resulting offset is smaller in absolute value than the
9233 original one. */
9234 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9235 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9236 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9237 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9238 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9239 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9240 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9241 {
9242 tree const1 = TREE_OPERAND (arg0, 1);
9243 tree const2 = TREE_OPERAND (arg1, 1);
9244 tree variable1 = TREE_OPERAND (arg0, 0);
9245 tree variable2 = TREE_OPERAND (arg1, 0);
9246 tree cst;
9247 const char * const warnmsg = G_("assuming signed overflow does not "
9248 "occur when combining constants around "
9249 "a comparison");
9250
9251 /* Put the constant on the side where it doesn't overflow and is
9252 of lower absolute value than before. */
9253 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9254 ? MINUS_EXPR : PLUS_EXPR,
9255 const2, const1);
9256 if (!TREE_OVERFLOW (cst)
9257 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9258 {
9259 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9260 return fold_build2_loc (loc, code, type,
9261 variable1,
9262 fold_build2_loc (loc,
9263 TREE_CODE (arg1), TREE_TYPE (arg1),
9264 variable2, cst));
9265 }
9266
9267 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9268 ? MINUS_EXPR : PLUS_EXPR,
9269 const1, const2);
9270 if (!TREE_OVERFLOW (cst)
9271 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9272 {
9273 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9274 return fold_build2_loc (loc, code, type,
9275 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9276 variable1, cst),
9277 variable2);
9278 }
9279 }
9280
9281 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9282 signed arithmetic case. That form is created by the compiler
9283 often enough for folding it to be of value. One example is in
9284 computing loop trip counts after Operator Strength Reduction. */
9285 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9286 && TREE_CODE (arg0) == MULT_EXPR
9287 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9288 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9289 && integer_zerop (arg1))
9290 {
9291 tree const1 = TREE_OPERAND (arg0, 1);
9292 tree const2 = arg1; /* zero */
9293 tree variable1 = TREE_OPERAND (arg0, 0);
9294 enum tree_code cmp_code = code;
9295
9296 /* Handle unfolded multiplication by zero. */
9297 if (integer_zerop (const1))
9298 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9299
9300 fold_overflow_warning (("assuming signed overflow does not occur when "
9301 "eliminating multiplication in comparison "
9302 "with zero"),
9303 WARN_STRICT_OVERFLOW_COMPARISON);
9304
9305 /* If const1 is negative we swap the sense of the comparison. */
9306 if (tree_int_cst_sgn (const1) < 0)
9307 cmp_code = swap_tree_comparison (cmp_code);
9308
9309 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9310 }
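/* Editorial examples of the transform above, for signed int x:

     x * 4 > 0    -->  x > 0
     x * -4 > 0   -->  x < 0          (negative C1 swaps the sense)
     x * 0 == 0   -->  0 == 0         (unfolded multiplication by zero)
*/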
9311
9312 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9313 if (tem)
9314 return tem;
9315
9316 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9317 {
9318 tree targ0 = strip_float_extensions (arg0);
9319 tree targ1 = strip_float_extensions (arg1);
9320 tree newtype = TREE_TYPE (targ0);
9321
9322 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9323 newtype = TREE_TYPE (targ1);
9324
9325 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9326 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9327 return fold_build2_loc (loc, code, type,
9328 fold_convert_loc (loc, newtype, targ0),
9329 fold_convert_loc (loc, newtype, targ1));
9330
9331 /* (-a) CMP (-b) -> b CMP a */
9332 if (TREE_CODE (arg0) == NEGATE_EXPR
9333 && TREE_CODE (arg1) == NEGATE_EXPR)
9334 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9335 TREE_OPERAND (arg0, 0));
9336
9337 if (TREE_CODE (arg1) == REAL_CST)
9338 {
9339 REAL_VALUE_TYPE cst;
9340 cst = TREE_REAL_CST (arg1);
9341
9342 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9343 if (TREE_CODE (arg0) == NEGATE_EXPR)
9344 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9345 TREE_OPERAND (arg0, 0),
9346 build_real (TREE_TYPE (arg1),
9347 real_value_negate (&cst)));
9348
9349 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9350 /* a CMP (-0) -> a CMP 0 */
9351 if (REAL_VALUE_MINUS_ZERO (cst))
9352 return fold_build2_loc (loc, code, type, arg0,
9353 build_real (TREE_TYPE (arg1), dconst0));
9354
9355 /* x != NaN is always true, other ops are always false. */
9356 if (REAL_VALUE_ISNAN (cst)
9357 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9358 {
9359 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9360 return omit_one_operand_loc (loc, type, tem, arg0);
9361 }
9362
9363 /* Fold comparisons against infinity. */
9364 if (REAL_VALUE_ISINF (cst)
9365 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9366 {
9367 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9368 if (tem != NULL_TREE)
9369 return tem;
9370 }
9371 }
9372
9373 /* If this is a comparison of a real constant with a PLUS_EXPR
9374 or a MINUS_EXPR of a real constant, we can convert it into a
9375 comparison with a revised real constant as long as no overflow
9376 occurs when unsafe_math_optimizations are enabled. */
9377 if (flag_unsafe_math_optimizations
9378 && TREE_CODE (arg1) == REAL_CST
9379 && (TREE_CODE (arg0) == PLUS_EXPR
9380 || TREE_CODE (arg0) == MINUS_EXPR)
9381 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9382 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9383 ? MINUS_EXPR : PLUS_EXPR,
9384 arg1, TREE_OPERAND (arg0, 1)))
9385 && !TREE_OVERFLOW (tem))
9386 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9387
9388 /* Likewise, we can simplify a comparison of a real constant with
9389 a MINUS_EXPR whose first operand is also a real constant, i.e.
9390 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9391 floating-point types only if -fassociative-math is set. */
9392 if (flag_associative_math
9393 && TREE_CODE (arg1) == REAL_CST
9394 && TREE_CODE (arg0) == MINUS_EXPR
9395 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9396 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9397 arg1))
9398 && !TREE_OVERFLOW (tem))
9399 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9400 TREE_OPERAND (arg0, 1), tem);
9401
9402 /* Fold comparisons against built-in math functions. */
9403 if (TREE_CODE (arg1) == REAL_CST
9404 && flag_unsafe_math_optimizations
9405 && ! flag_errno_math)
9406 {
9407 enum built_in_function fcode = builtin_mathfn_code (arg0);
9408
9409 if (fcode != END_BUILTINS)
9410 {
9411 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9412 if (tem != NULL_TREE)
9413 return tem;
9414 }
9415 }
9416 }
9417
9418 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9419 && CONVERT_EXPR_P (arg0))
9420 {
9421 /* If we are widening one operand of an integer comparison,
9422 see if the other operand is similarly being widened. Perhaps we
9423 can do the comparison in the narrower type. */
9424 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9425 if (tem)
9426 return tem;
9427
9428 /* Or if we are changing signedness. */
9429 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9430 if (tem)
9431 return tem;
9432 }
9433
9434 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9435 constant, we can simplify it. */
9436 if (TREE_CODE (arg1) == INTEGER_CST
9437 && (TREE_CODE (arg0) == MIN_EXPR
9438 || TREE_CODE (arg0) == MAX_EXPR)
9439 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9440 {
9441 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9442 if (tem)
9443 return tem;
9444 }
9445
9446 /* Simplify comparison of something with itself. (For IEEE
9447 floating-point, we can only do some of these simplifications.) */
9448 if (operand_equal_p (arg0, arg1, 0))
9449 {
9450 switch (code)
9451 {
9452 case EQ_EXPR:
9453 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9454 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9455 return constant_boolean_node (1, type);
9456 break;
9457
9458 case GE_EXPR:
9459 case LE_EXPR:
9460 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9461 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9462 return constant_boolean_node (1, type);
9463 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9464
9465 case NE_EXPR:
9466 /* For NE, we can only do this simplification if integer
9467 or we don't honor IEEE floating point NaNs. */
9468 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9469 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9470 break;
9471 /* ... fall through ... */
9472 case GT_EXPR:
9473 case LT_EXPR:
9474 return constant_boolean_node (0, type);
9475 default:
9476 gcc_unreachable ();
9477 }
9478 }
9479
9480 /* If we are comparing an expression that just has comparisons
9481 of two integer values, arithmetic expressions of those comparisons,
9482 and constants, we can simplify it. There are only three cases
9483 to check: the two values can either be equal, the first can be
9484 greater, or the second can be greater. Fold the expression for
9485 those three values. Since each value must be 0 or 1, we have
9486 eight possibilities, each of which corresponds to the constant 0
9487 or 1 or one of the six possible comparisons.
9488
9489 This handles common cases like (a > b) == 0 but also handles
9490 expressions like ((x > y) - (y > x)) > 0, which supposedly
9491 occur in macroized code. */
9492
9493 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9494 {
9495 tree cval1 = 0, cval2 = 0;
9496 int save_p = 0;
9497
9498 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9499 /* Don't handle degenerate cases here; they should already
9500 have been handled anyway. */
9501 && cval1 != 0 && cval2 != 0
9502 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9503 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9504 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9505 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9506 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9507 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9508 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9509 {
9510 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9511 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9512
9513 /* We can't just pass T to eval_subst in case cval1 or cval2
9514 was the same as ARG1. */
9515
9516 tree high_result
9517 = fold_build2_loc (loc, code, type,
9518 eval_subst (loc, arg0, cval1, maxval,
9519 cval2, minval),
9520 arg1);
9521 tree equal_result
9522 = fold_build2_loc (loc, code, type,
9523 eval_subst (loc, arg0, cval1, maxval,
9524 cval2, maxval),
9525 arg1);
9526 tree low_result
9527 = fold_build2_loc (loc, code, type,
9528 eval_subst (loc, arg0, cval1, minval,
9529 cval2, maxval),
9530 arg1);
9531
9532 /* All three of these results should be 0 or 1. Confirm they are.
9533 Then use those values to select the proper code to use. */
9534
9535 if (TREE_CODE (high_result) == INTEGER_CST
9536 && TREE_CODE (equal_result) == INTEGER_CST
9537 && TREE_CODE (low_result) == INTEGER_CST)
9538 {
9539 /* Make a 3-bit mask with the high-order bit being the
9540 value for `>', the next for '=', and the low for '<'. */
9541 switch ((integer_onep (high_result) * 4)
9542 + (integer_onep (equal_result) * 2)
9543 + integer_onep (low_result))
9544 {
9545 case 0:
9546 /* Always false. */
9547 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9548 case 1:
9549 code = LT_EXPR;
9550 break;
9551 case 2:
9552 code = EQ_EXPR;
9553 break;
9554 case 3:
9555 code = LE_EXPR;
9556 break;
9557 case 4:
9558 code = GT_EXPR;
9559 break;
9560 case 5:
9561 code = NE_EXPR;
9562 break;
9563 case 6:
9564 code = GE_EXPR;
9565 break;
9566 case 7:
9567 /* Always true. */
9568 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9569 }
9570
9571 if (save_p)
9572 {
9573 tem = save_expr (build2 (code, type, cval1, cval2));
9574 SET_EXPR_LOCATION (tem, loc);
9575 return tem;
9576 }
9577 return fold_build2_loc (loc, code, type, cval1, cval2);
9578 }
9579 }
9580 }
9581
9582 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9583 into a single range test. */
9584 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9585 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9586 && TREE_CODE (arg1) == INTEGER_CST
9587 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9588 && !integer_zerop (TREE_OPERAND (arg0, 1))
9589 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9590 && !TREE_OVERFLOW (arg1))
9591 {
9592 tem = fold_div_compare (loc, code, type, arg0, arg1);
9593 if (tem != NULL_TREE)
9594 return tem;
9595 }
9596
9597 /* Fold ~X op ~Y as Y op X. */
9598 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9599 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9600 {
9601 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9602 return fold_build2_loc (loc, code, type,
9603 fold_convert_loc (loc, cmp_type,
9604 TREE_OPERAND (arg1, 0)),
9605 TREE_OPERAND (arg0, 0));
9606 }
9607
9608 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9609 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9610 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9611 {
9612 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9613 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9614 TREE_OPERAND (arg0, 0),
9615 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9616 fold_convert_loc (loc, cmp_type, arg1)));
9617 }
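/* Editorial examples of the two BIT_NOT_EXPR rules above, for int x, y:

     ~x < ~y  -->  y < x
     ~x < 5   -->  x > ~5            (that is, x > -6)
*/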
9618
9619 return NULL_TREE;
9620 }
9621
9622
9623 /* Subroutine of fold_binary. Optimize complex multiplications of the
9624 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9625 argument EXPR represents the expression "z" of type TYPE. */
9626
9627 static tree
9628 fold_mult_zconjz (location_t loc, tree type, tree expr)
9629 {
9630 tree itype = TREE_TYPE (type);
9631 tree rpart, ipart, tem;
9632
9633 if (TREE_CODE (expr) == COMPLEX_EXPR)
9634 {
9635 rpart = TREE_OPERAND (expr, 0);
9636 ipart = TREE_OPERAND (expr, 1);
9637 }
9638 else if (TREE_CODE (expr) == COMPLEX_CST)
9639 {
9640 rpart = TREE_REALPART (expr);
9641 ipart = TREE_IMAGPART (expr);
9642 }
9643 else
9644 {
9645 expr = save_expr (expr);
9646 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9647 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9648 }
9649
9650 rpart = save_expr (rpart);
9651 ipart = save_expr (ipart);
9652 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9653 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9654 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9655 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9656 build_zero_cst (itype));
9657 }
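/* Editorial sketch: for z = a + b*i the folding above uses

     z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i

   so the imaginary part of the product is a known zero constant.  */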
9658
9659
9660 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9661 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9662 guarantees that P and N have the same least significant log2(M) bits.
9663 N is not otherwise constrained. In particular, N is not normalized to
9664 0 <= N < M as is common. In general, the precise value of P is unknown.
9665 M is chosen as large as possible such that constant N can be determined.
9666
9667 Returns M and sets *RESIDUE to N.
9668
9669 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9670 account. This is not always possible due to PR 35705.
9671 */
9672
9673 static unsigned HOST_WIDE_INT
9674 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9675 bool allow_func_align)
9676 {
9677 enum tree_code code;
9678
9679 *residue = 0;
9680
9681 code = TREE_CODE (expr);
9682 if (code == ADDR_EXPR)
9683 {
9684 unsigned int bitalign;
9685 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9686 *residue /= BITS_PER_UNIT;
9687 return bitalign / BITS_PER_UNIT;
9688 }
9689 else if (code == POINTER_PLUS_EXPR)
9690 {
9691 tree op0, op1;
9692 unsigned HOST_WIDE_INT modulus;
9693 enum tree_code inner_code;
9694
9695 op0 = TREE_OPERAND (expr, 0);
9696 STRIP_NOPS (op0);
9697 modulus = get_pointer_modulus_and_residue (op0, residue,
9698 allow_func_align);
9699
9700 op1 = TREE_OPERAND (expr, 1);
9701 STRIP_NOPS (op1);
9702 inner_code = TREE_CODE (op1);
9703 if (inner_code == INTEGER_CST)
9704 {
9705 *residue += TREE_INT_CST_LOW (op1);
9706 return modulus;
9707 }
9708 else if (inner_code == MULT_EXPR)
9709 {
9710 op1 = TREE_OPERAND (op1, 1);
9711 if (TREE_CODE (op1) == INTEGER_CST)
9712 {
9713 unsigned HOST_WIDE_INT align;
9714
9715 /* Compute the greatest power-of-2 divisor of op1. */
9716 align = TREE_INT_CST_LOW (op1);
9717 align &= -align;
9718
9719 /* If align is non-zero and less than modulus, replace
9720 modulus with align. If align is 0, then either op1 is 0
9721 or the greatest power-of-2 divisor of op1 doesn't fit in an
9722 unsigned HOST_WIDE_INT. In either case, no additional
9723 constraint is imposed. */
9724 if (align)
9725 modulus = MIN (modulus, align);
9726
9727 return modulus;
9728 }
9729 }
9730 }
9731
9732 /* If we get here, we were unable to determine anything useful about the
9733 expression. */
9734 return 1;
9735 }
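/* Editorial example: given "char buf[32] __attribute__ ((aligned (16)));",
   calling this on &buf[0] + 3 yields modulus M == 16 (at least) and
   *RESIDUE == 3, i.e. the pointer value P satisfies P == 3 (mod 16).  */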
9736
9737 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9738 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9739
9740 static bool
9741 vec_cst_ctor_to_array (tree arg, tree *elts)
9742 {
9743 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9744
9745 if (TREE_CODE (arg) == VECTOR_CST)
9746 {
9747 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9748 elts[i] = VECTOR_CST_ELT (arg, i);
9749 }
9750 else if (TREE_CODE (arg) == CONSTRUCTOR)
9751 {
9752 constructor_elt *elt;
9753
9754 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9755 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9756 return false;
9757 else
9758 elts[i] = elt->value;
9759 }
9760 else
9761 return false;
9762 for (; i < nelts; i++)
9763 elts[i]
9764 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9765 return true;
9766 }
9767
9768 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9769 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9770 NULL_TREE otherwise. */
9771
9772 static tree
9773 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9774 {
9775 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9776 tree *elts;
9777 bool need_ctor = false;
9778
9779 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9780 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9781 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9782 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9783 return NULL_TREE;
9784
9785 elts = XALLOCAVEC (tree, nelts * 3);
9786 if (!vec_cst_ctor_to_array (arg0, elts)
9787 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9788 return NULL_TREE;
9789
9790 for (i = 0; i < nelts; i++)
9791 {
9792 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9793 need_ctor = true;
9794 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9795 }
9796
9797 if (need_ctor)
9798 {
9799 vec<constructor_elt, va_gc> *v;
9800 vec_alloc (v, nelts);
9801 for (i = 0; i < nelts; i++)
9802 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9803 return build_constructor (type, v);
9804 }
9805 else
9806 return build_vector (type, &elts[2 * nelts]);
9807 }
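/* Editorial example: with nelts == 4, ARG0 == {a,b,c,d},
   ARG1 == {e,f,g,h} and SEL == {0,5,2,7}, the result is {a,f,c,h};
   selector values of nelts and above index into ARG1.  */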
9808
9809 /* Try to fold a pointer difference of type TYPE between two address
9810 expressions of array references AREF0 and AREF1 using location LOC. Return a
9811 simplified expression for the difference or NULL_TREE. */
9812
9813 static tree
9814 fold_addr_of_array_ref_difference (location_t loc, tree type,
9815 tree aref0, tree aref1)
9816 {
9817 tree base0 = TREE_OPERAND (aref0, 0);
9818 tree base1 = TREE_OPERAND (aref1, 0);
9819 tree base_offset = build_int_cst (type, 0);
9820
9821 /* If the bases are array references as well, recurse. If the bases
9822 are pointer indirections compute the difference of the pointers.
9823 If the bases are equal, we are set. */
9824 if ((TREE_CODE (base0) == ARRAY_REF
9825 && TREE_CODE (base1) == ARRAY_REF
9826 && (base_offset
9827 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9828 || (INDIRECT_REF_P (base0)
9829 && INDIRECT_REF_P (base1)
9830 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9831 TREE_OPERAND (base0, 0),
9832 TREE_OPERAND (base1, 0))))
9833 || operand_equal_p (base0, base1, 0))
9834 {
9835 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9836 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9837 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9838 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9839 return fold_build2_loc (loc, PLUS_EXPR, type,
9840 base_offset,
9841 fold_build2_loc (loc, MULT_EXPR, type,
9842 diff, esz));
9843 }
9844 return NULL_TREE;
9845 }
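/* Editorial example of the recursion above, for "int a[8][8]":

     &a[i][j] - &a[k][l]  -->  (i - k) * 32 + (j - l) * 4

   as a byte offset: the bases a[i] and a[k] are themselves ARRAY_REFs,
   so their difference is computed recursively and added to the scaled
   index difference of the innermost dimension.  */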
9846
9847 /* If the real or vector real constant CST of type TYPE has an exact
9848 inverse, return it, else return NULL. */
9849
9850 static tree
9851 exact_inverse (tree type, tree cst)
9852 {
9853 REAL_VALUE_TYPE r;
9854 tree unit_type, *elts;
9855 enum machine_mode mode;
9856 unsigned vec_nelts, i;
9857
9858 switch (TREE_CODE (cst))
9859 {
9860 case REAL_CST:
9861 r = TREE_REAL_CST (cst);
9862
9863 if (exact_real_inverse (TYPE_MODE (type), &r))
9864 return build_real (type, r);
9865
9866 return NULL_TREE;
9867
9868 case VECTOR_CST:
9869 vec_nelts = VECTOR_CST_NELTS (cst);
9870 elts = XALLOCAVEC (tree, vec_nelts);
9871 unit_type = TREE_TYPE (type);
9872 mode = TYPE_MODE (unit_type);
9873
9874 for (i = 0; i < vec_nelts; i++)
9875 {
9876 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9877 if (!exact_real_inverse (mode, &r))
9878 return NULL_TREE;
9879 elts[i] = build_real (unit_type, r);
9880 }
9881
9882 return build_vector (type, elts);
9883
9884 default:
9885 return NULL_TREE;
9886 }
9887 }
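/* Editorial examples: 4.0 has the exact binary inverse 0.25, so x / 4.0
   may become x * 0.25; 3.0 returns NULL_TREE because 1/3 has no finite
   binary representation.  */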
9888
9889 /* Mask out the tz least significant bits of X of type TYPE where
9890 tz is the number of trailing zeroes in Y. */
9891 static wide_int
9892 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9893 {
9894 int tz = wi::ctz (y);
9895 if (tz > 0)
9896 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9897 return x;
9898 }
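#if 0
/* Editorial sketch, not built: the same masking on 64-bit unsigned
   values.  E.g. y == 24 has three trailing zeroes, so the low three
   bits of x are cleared.  */
static unsigned long long
mask_with_tz_sketch (unsigned long long x, unsigned long long y)
{
  if (y == 0)
    return 0;	/* Every bit position of y is a trailing zero.  */
  int tz = __builtin_ctzll (y);
  /* Clear the tz least significant bits of x.  */
  return x & ~((1ULL << tz) - 1);
}
#endif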
9899
9900 /* Return true when T is an address and is known to be nonzero.
9901 For floating point we further ensure that T is not denormal.
9902 Similar logic is present in nonzero_address in rtlanal.h.
9903
9904 If the return value is based on the assumption that signed overflow
9905 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9906 change *STRICT_OVERFLOW_P. */
9907
9908 static bool
9909 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9910 {
9911 tree type = TREE_TYPE (t);
9912 enum tree_code code;
9913
9914 /* Doing something useful for floating point would need more work. */
9915 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9916 return false;
9917
9918 code = TREE_CODE (t);
9919 switch (TREE_CODE_CLASS (code))
9920 {
9921 case tcc_unary:
9922 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9923 strict_overflow_p);
9924 case tcc_binary:
9925 case tcc_comparison:
9926 return tree_binary_nonzero_warnv_p (code, type,
9927 TREE_OPERAND (t, 0),
9928 TREE_OPERAND (t, 1),
9929 strict_overflow_p);
9930 case tcc_constant:
9931 case tcc_declaration:
9932 case tcc_reference:
9933 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9934
9935 default:
9936 break;
9937 }
9938
9939 switch (code)
9940 {
9941 case TRUTH_NOT_EXPR:
9942 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9943 strict_overflow_p);
9944
9945 case TRUTH_AND_EXPR:
9946 case TRUTH_OR_EXPR:
9947 case TRUTH_XOR_EXPR:
9948 return tree_binary_nonzero_warnv_p (code, type,
9949 TREE_OPERAND (t, 0),
9950 TREE_OPERAND (t, 1),
9951 strict_overflow_p);
9952
9953 case COND_EXPR:
9954 case CONSTRUCTOR:
9955 case OBJ_TYPE_REF:
9956 case ASSERT_EXPR:
9957 case ADDR_EXPR:
9958 case WITH_SIZE_EXPR:
9959 case SSA_NAME:
9960 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9961
9962 case COMPOUND_EXPR:
9963 case MODIFY_EXPR:
9964 case BIND_EXPR:
9965 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9966 strict_overflow_p);
9967
9968 case SAVE_EXPR:
9969 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9970 strict_overflow_p);
9971
9972 case CALL_EXPR:
9973 {
9974 tree fndecl = get_callee_fndecl (t);
9975 if (!fndecl) return false;
9976 if (flag_delete_null_pointer_checks && !flag_check_new
9977 && DECL_IS_OPERATOR_NEW (fndecl)
9978 && !TREE_NOTHROW (fndecl))
9979 return true;
9980 if (flag_delete_null_pointer_checks
9981 && lookup_attribute ("returns_nonnull",
9982 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9983 return true;
9984 return alloca_call_p (t);
9985 }
9986
9987 default:
9988 break;
9989 }
9990 return false;
9991 }
9992
9993 /* Return true when T is an address and is known to be nonzero.
9994 Handle warnings about undefined signed overflow. */
9995
9996 static bool
9997 tree_expr_nonzero_p (tree t)
9998 {
9999 bool ret, strict_overflow_p;
10000
10001 strict_overflow_p = false;
10002 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10003 if (strict_overflow_p)
10004 fold_overflow_warning (("assuming signed overflow does not occur when "
10005 "determining that expression is always "
10006 "non-zero"),
10007 WARN_STRICT_OVERFLOW_MISC);
10008 return ret;
10009 }
10010
10011 /* Fold a binary expression of code CODE and type TYPE with operands
10012 OP0 and OP1. LOC is the location of the resulting expression.
10013 Return the folded expression if folding is successful. Otherwise,
10014 return NULL_TREE. */
10015
10016 tree
10017 fold_binary_loc (location_t loc,
10018 enum tree_code code, tree type, tree op0, tree op1)
10019 {
10020 enum tree_code_class kind = TREE_CODE_CLASS (code);
10021 tree arg0, arg1, tem;
10022 tree t1 = NULL_TREE;
10023 bool strict_overflow_p;
10024 unsigned int prec;
10025
10026 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10027 && TREE_CODE_LENGTH (code) == 2
10028 && op0 != NULL_TREE
10029 && op1 != NULL_TREE);
10030
10031 arg0 = op0;
10032 arg1 = op1;
10033
10034 /* Strip any conversions that don't change the mode. This is
10035 safe for every expression, except for a comparison expression
10036 because its signedness is derived from its operands. So, in
10037 the latter case, only strip conversions that don't change the
10038 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10039 preserved.
10040
10041 Note that this is done as an internal manipulation within the
10042 constant folder, in order to find the simplest representation
10043 of the arguments so that their form can be studied. In any
10044 case, the appropriate type conversions should be put back in
10045 the tree that will get out of the constant folder. */
10046
10047 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10048 {
10049 STRIP_SIGN_NOPS (arg0);
10050 STRIP_SIGN_NOPS (arg1);
10051 }
10052 else
10053 {
10054 STRIP_NOPS (arg0);
10055 STRIP_NOPS (arg1);
10056 }
10057
10058 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10059 constant but we can't do arithmetic on them. */
10060 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10061 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10062 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10063 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10064 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10065 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10066 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10067 {
10068 if (kind == tcc_binary)
10069 {
10070 /* Make sure type and arg0 have the same saturating flag. */
10071 gcc_assert (TYPE_SATURATING (type)
10072 == TYPE_SATURATING (TREE_TYPE (arg0)));
10073 tem = const_binop (code, arg0, arg1);
10074 }
10075 else if (kind == tcc_comparison)
10076 tem = fold_relational_const (code, type, arg0, arg1);
10077 else
10078 tem = NULL_TREE;
10079
10080 if (tem != NULL_TREE)
10081 {
10082 if (TREE_TYPE (tem) != type)
10083 tem = fold_convert_loc (loc, type, tem);
10084 return tem;
10085 }
10086 }
10087
10088 /* If this is a commutative operation, and ARG0 is a constant, move it
10089 to ARG1 to reduce the number of tests below. */
10090 if (commutative_tree_code (code)
10091 && tree_swap_operands_p (arg0, arg1, true))
10092 return fold_build2_loc (loc, code, type, op1, op0);
10093
10094 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10095
10096 First check for cases where an arithmetic operation is applied to a
10097 compound, conditional, or comparison operation. Push the arithmetic
10098 operation inside the compound or conditional to see if any folding
10099 can then be done. Convert comparison to conditional for this purpose.
10100 This also optimizes non-constant cases that used to be done in
10101 expand_expr.
10102
10103 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10104 where one of the operands is a truth value and the other is a truth
10105 value or a BIT_AND_EXPR with the constant 1. In that case, the
10106 code below would make the expression more complex. Change it to a
10107 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10108 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10109
10110 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10111 || code == EQ_EXPR || code == NE_EXPR)
10112 && TREE_CODE (type) != VECTOR_TYPE
10113 && ((truth_value_p (TREE_CODE (arg0))
10114 && (truth_value_p (TREE_CODE (arg1))
10115 || (TREE_CODE (arg1) == BIT_AND_EXPR
10116 && integer_onep (TREE_OPERAND (arg1, 1)))))
10117 || (truth_value_p (TREE_CODE (arg1))
10118 && (truth_value_p (TREE_CODE (arg0))
10119 || (TREE_CODE (arg0) == BIT_AND_EXPR
10120 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10121 {
10122 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10123 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10124 : TRUTH_XOR_EXPR,
10125 boolean_type_node,
10126 fold_convert_loc (loc, boolean_type_node, arg0),
10127 fold_convert_loc (loc, boolean_type_node, arg1));
10128
10129 if (code == EQ_EXPR)
10130 tem = invert_truthvalue_loc (loc, tem);
10131
10132 return fold_convert_loc (loc, type, tem);
10133 }
10134
10135 if (TREE_CODE_CLASS (code) == tcc_binary
10136 || TREE_CODE_CLASS (code) == tcc_comparison)
10137 {
10138 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10139 {
10140 tem = fold_build2_loc (loc, code, type,
10141 fold_convert_loc (loc, TREE_TYPE (op0),
10142 TREE_OPERAND (arg0, 1)), op1);
10143 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10144 tem);
10145 }
10146 if (TREE_CODE (arg1) == COMPOUND_EXPR
10147 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10148 {
10149 tem = fold_build2_loc (loc, code, type, op0,
10150 fold_convert_loc (loc, TREE_TYPE (op1),
10151 TREE_OPERAND (arg1, 1)));
10152 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10153 tem);
10154 }
10155
10156 if (TREE_CODE (arg0) == COND_EXPR
10157 || TREE_CODE (arg0) == VEC_COND_EXPR
10158 || COMPARISON_CLASS_P (arg0))
10159 {
10160 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10161 arg0, arg1,
10162 /*cond_first_p=*/1);
10163 if (tem != NULL_TREE)
10164 return tem;
10165 }
10166
10167 if (TREE_CODE (arg1) == COND_EXPR
10168 || TREE_CODE (arg1) == VEC_COND_EXPR
10169 || COMPARISON_CLASS_P (arg1))
10170 {
10171 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10172 arg1, arg0,
10173 /*cond_first_p=*/0);
10174 if (tem != NULL_TREE)
10175 return tem;
10176 }
10177 }
10178
10179 switch (code)
10180 {
10181 case MEM_REF:
10182 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10183 if (TREE_CODE (arg0) == ADDR_EXPR
10184 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10185 {
10186 tree iref = TREE_OPERAND (arg0, 0);
10187 return fold_build2 (MEM_REF, type,
10188 TREE_OPERAND (iref, 0),
10189 int_const_binop (PLUS_EXPR, arg1,
10190 TREE_OPERAND (iref, 1)));
10191 }
10192
10193 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10194 if (TREE_CODE (arg0) == ADDR_EXPR
10195 && handled_component_p (TREE_OPERAND (arg0, 0)))
10196 {
10197 tree base;
10198 HOST_WIDE_INT coffset;
10199 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10200 &coffset);
10201 if (!base)
10202 return NULL_TREE;
10203 return fold_build2 (MEM_REF, type,
10204 build_fold_addr_expr (base),
10205 int_const_binop (PLUS_EXPR, arg1,
10206 size_int (coffset)));
10207 }
10208
10209 return NULL_TREE;
10210
10211 case POINTER_PLUS_EXPR:
10212 /* 0 +p index -> (type)index */
10213 if (integer_zerop (arg0))
10214 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10215
10216 /* PTR +p 0 -> PTR */
10217 if (integer_zerop (arg1))
10218 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10219
10220 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10221 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10222 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10223 return fold_convert_loc (loc, type,
10224 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10225 fold_convert_loc (loc, sizetype,
10226 arg1),
10227 fold_convert_loc (loc, sizetype,
10228 arg0)));
10229
10230 /* (PTR +p B) +p A -> PTR +p (B + A) */
10231 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10232 {
10233 tree inner;
10234 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10235 tree arg00 = TREE_OPERAND (arg0, 0);
10236 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10237 arg01, fold_convert_loc (loc, sizetype, arg1));
10238 return fold_convert_loc (loc, type,
10239 fold_build_pointer_plus_loc (loc,
10240 arg00, inner));
10241 }
10242
10243 /* PTR_CST +p CST -> CST1 */
10244 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10245 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10246 fold_convert_loc (loc, type, arg1));
10247
10248 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10249 of the array. The loop optimizer sometimes produces this type of
10250 expression. */
10251 if (TREE_CODE (arg0) == ADDR_EXPR)
10252 {
10253 tem = try_move_mult_to_index (loc, arg0,
10254 fold_convert_loc (loc,
10255 ssizetype, arg1));
10256 if (tem)
10257 return fold_convert_loc (loc, type, tem);
10258 }
10259
10260 return NULL_TREE;
10261
10262 case PLUS_EXPR:
10263 /* A + (-B) -> A - B */
10264 if (TREE_CODE (arg1) == NEGATE_EXPR)
10265 return fold_build2_loc (loc, MINUS_EXPR, type,
10266 fold_convert_loc (loc, type, arg0),
10267 fold_convert_loc (loc, type,
10268 TREE_OPERAND (arg1, 0)));
10269 /* (-A) + B -> B - A */
10270 if (TREE_CODE (arg0) == NEGATE_EXPR
10271 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10272 return fold_build2_loc (loc, MINUS_EXPR, type,
10273 fold_convert_loc (loc, type, arg1),
10274 fold_convert_loc (loc, type,
10275 TREE_OPERAND (arg0, 0)));
10276
10277 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10278 {
10279 /* Convert ~A + 1 to -A. */
10280 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10281 && integer_onep (arg1))
10282 return fold_build1_loc (loc, NEGATE_EXPR, type,
10283 fold_convert_loc (loc, type,
10284 TREE_OPERAND (arg0, 0)));
10285
10286 /* ~X + X is -1. */
10287 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10288 && !TYPE_OVERFLOW_TRAPS (type))
10289 {
10290 tree tem = TREE_OPERAND (arg0, 0);
10291
10292 STRIP_NOPS (tem);
10293 if (operand_equal_p (tem, arg1, 0))
10294 {
10295 t1 = build_all_ones_cst (type);
10296 return omit_one_operand_loc (loc, type, t1, arg1);
10297 }
10298 }
10299
10300 /* X + ~X is -1. */
10301 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10302 && !TYPE_OVERFLOW_TRAPS (type))
10303 {
10304 tree tem = TREE_OPERAND (arg1, 0);
10305
10306 STRIP_NOPS (tem);
10307 if (operand_equal_p (arg0, tem, 0))
10308 {
10309 t1 = build_all_ones_cst (type);
10310 return omit_one_operand_loc (loc, type, t1, arg0);
10311 }
10312 }
10313
10314 /* X + (X / CST) * -CST is X % CST. */
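/* For example, with CST == 8: X + (X / 8) * -8 is X - (X / 8) * 8,
which is exactly X % 8 under truncating division. */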
10315 if (TREE_CODE (arg1) == MULT_EXPR
10316 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10317 && operand_equal_p (arg0,
10318 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10319 {
10320 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10321 tree cst1 = TREE_OPERAND (arg1, 1);
10322 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10323 cst1, cst0);
10324 if (sum && integer_zerop (sum))
10325 return fold_convert_loc (loc, type,
10326 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10327 TREE_TYPE (arg0), arg0,
10328 cst0));
10329 }
10330 }
10331
10332 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10333 one. Make sure the type is not saturating and has the signedness of
10334 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10335 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10336 if ((TREE_CODE (arg0) == MULT_EXPR
10337 || TREE_CODE (arg1) == MULT_EXPR)
10338 && !TYPE_SATURATING (type)
10339 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10340 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10341 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10342 {
10343 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10344 if (tem)
10345 return tem;
10346 }
10347
10348 if (! FLOAT_TYPE_P (type))
10349 {
10350 if (integer_zerop (arg1))
10351 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10352
10353 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10354 with a constant, and the two constants have no bits in common,
10355 we should treat this as a BIT_IOR_EXPR since this may produce more
10356 simplifications. */
10357 if (TREE_CODE (arg0) == BIT_AND_EXPR
10358 && TREE_CODE (arg1) == BIT_AND_EXPR
10359 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10360 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10361 && integer_zerop (const_binop (BIT_AND_EXPR,
10362 TREE_OPERAND (arg0, 1),
10363 TREE_OPERAND (arg1, 1))))
10364 {
10365 code = BIT_IOR_EXPR;
10366 goto bit_ior;
10367 }
10368
10369 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10370 (plus (plus (mult) (mult)) (foo)) so that we can
10371 take advantage of the factoring cases below. */
10372 if (TYPE_OVERFLOW_WRAPS (type)
10373 && (((TREE_CODE (arg0) == PLUS_EXPR
10374 || TREE_CODE (arg0) == MINUS_EXPR)
10375 && TREE_CODE (arg1) == MULT_EXPR)
10376 || ((TREE_CODE (arg1) == PLUS_EXPR
10377 || TREE_CODE (arg1) == MINUS_EXPR)
10378 && TREE_CODE (arg0) == MULT_EXPR)))
10379 {
10380 tree parg0, parg1, parg, marg;
10381 enum tree_code pcode;
10382
10383 if (TREE_CODE (arg1) == MULT_EXPR)
10384 parg = arg0, marg = arg1;
10385 else
10386 parg = arg1, marg = arg0;
10387 pcode = TREE_CODE (parg);
10388 parg0 = TREE_OPERAND (parg, 0);
10389 parg1 = TREE_OPERAND (parg, 1);
10390 STRIP_NOPS (parg0);
10391 STRIP_NOPS (parg1);
10392
10393 if (TREE_CODE (parg0) == MULT_EXPR
10394 && TREE_CODE (parg1) != MULT_EXPR)
10395 return fold_build2_loc (loc, pcode, type,
10396 fold_build2_loc (loc, PLUS_EXPR, type,
10397 fold_convert_loc (loc, type,
10398 parg0),
10399 fold_convert_loc (loc, type,
10400 marg)),
10401 fold_convert_loc (loc, type, parg1));
10402 if (TREE_CODE (parg0) != MULT_EXPR
10403 && TREE_CODE (parg1) == MULT_EXPR)
10404 return
10405 fold_build2_loc (loc, PLUS_EXPR, type,
10406 fold_convert_loc (loc, type, parg0),
10407 fold_build2_loc (loc, pcode, type,
10408 fold_convert_loc (loc, type, marg),
10409 fold_convert_loc (loc, type,
10410 parg1)));
10411 }
10412 }
10413 else
10414 {
10415 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10416 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10417 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10418
10419 /* Likewise if the operands are reversed. */
10420 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10421 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10422
10423 /* Convert X + -C into X - C. */
10424 if (TREE_CODE (arg1) == REAL_CST
10425 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10426 {
10427 tem = fold_negate_const (arg1, type);
10428 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10429 return fold_build2_loc (loc, MINUS_EXPR, type,
10430 fold_convert_loc (loc, type, arg0),
10431 fold_convert_loc (loc, type, tem));
10432 }
10433
10434 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10435 to __complex__ ( x, y ). This is not the same for SNaNs or
10436 if signed zeros are involved. */
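/* E.g. if x is -0.0, the true real part x + 0.0 is +0.0, so folding
to __complex__ ( x, y ) would wrongly preserve the sign of zero. */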
10437 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10438 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10439 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10440 {
10441 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10442 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10443 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10444 bool arg0rz = false, arg0iz = false;
10445 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10446 || (arg0i && (arg0iz = real_zerop (arg0i))))
10447 {
10448 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10449 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10450 if (arg0rz && arg1i && real_zerop (arg1i))
10451 {
10452 tree rp = arg1r ? arg1r
10453 : build1 (REALPART_EXPR, rtype, arg1);
10454 tree ip = arg0i ? arg0i
10455 : build1 (IMAGPART_EXPR, rtype, arg0);
10456 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10457 }
10458 else if (arg0iz && arg1r && real_zerop (arg1r))
10459 {
10460 tree rp = arg0r ? arg0r
10461 : build1 (REALPART_EXPR, rtype, arg0);
10462 tree ip = arg1i ? arg1i
10463 : build1 (IMAGPART_EXPR, rtype, arg1);
10464 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10465 }
10466 }
10467 }
10468
10469 if (flag_unsafe_math_optimizations
10470 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10471 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10472 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10473 return tem;
10474
10475 /* Convert x+x into x*2.0. */
10476 if (operand_equal_p (arg0, arg1, 0)
10477 && SCALAR_FLOAT_TYPE_P (type))
10478 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10479 build_real (type, dconst2));
10480
10481 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10482 We associate floats only if the user has specified
10483 -fassociative-math. */
10484 if (flag_associative_math
10485 && TREE_CODE (arg1) == PLUS_EXPR
10486 && TREE_CODE (arg0) != MULT_EXPR)
10487 {
10488 tree tree10 = TREE_OPERAND (arg1, 0);
10489 tree tree11 = TREE_OPERAND (arg1, 1);
10490 if (TREE_CODE (tree11) == MULT_EXPR
10491 && TREE_CODE (tree10) == MULT_EXPR)
10492 {
10493 tree tree0;
10494 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10495 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10496 }
10497 }
10498 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10499 We associate floats only if the user has specified
10500 -fassociative-math. */
10501 if (flag_associative_math
10502 && TREE_CODE (arg0) == PLUS_EXPR
10503 && TREE_CODE (arg1) != MULT_EXPR)
10504 {
10505 tree tree00 = TREE_OPERAND (arg0, 0);
10506 tree tree01 = TREE_OPERAND (arg0, 1);
10507 if (TREE_CODE (tree01) == MULT_EXPR
10508 && TREE_CODE (tree00) == MULT_EXPR)
10509 {
10510 tree tree0;
10511 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10512 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10513 }
10514 }
10515 }
10516
10517 bit_rotate:
10518 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10519 is a rotate of A by C1 bits. */
10520 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10521 is a rotate of A by B bits. */
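/* For example, for a 32-bit unsigned A, (A << 3) + (A >> 29) is A
rotated left by 3 bits; the addition acts as bitwise OR because the
two shifted parts cannot overlap. */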
10522 {
10523 enum tree_code code0, code1;
10524 tree rtype;
10525 code0 = TREE_CODE (arg0);
10526 code1 = TREE_CODE (arg1);
10527 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10528 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10529 && operand_equal_p (TREE_OPERAND (arg0, 0),
10530 TREE_OPERAND (arg1, 0), 0)
10531 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10532 TYPE_UNSIGNED (rtype))
10533 /* Only create rotates in complete modes. Other cases are not
10534 expanded properly. */
10535 && (element_precision (rtype)
10536 == element_precision (TYPE_MODE (rtype))))
10537 {
10538 tree tree01, tree11;
10539 enum tree_code code01, code11;
10540
10541 tree01 = TREE_OPERAND (arg0, 1);
10542 tree11 = TREE_OPERAND (arg1, 1);
10543 STRIP_NOPS (tree01);
10544 STRIP_NOPS (tree11);
10545 code01 = TREE_CODE (tree01);
10546 code11 = TREE_CODE (tree11);
10547 if (code01 == INTEGER_CST
10548 && code11 == INTEGER_CST
10549 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10550 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10551 {
10552 tem = build2_loc (loc, LROTATE_EXPR,
10553 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10554 TREE_OPERAND (arg0, 0),
10555 code0 == LSHIFT_EXPR ? tree01 : tree11);
10556 return fold_convert_loc (loc, type, tem);
10557 }
10558 else if (code11 == MINUS_EXPR)
10559 {
10560 tree tree110, tree111;
10561 tree110 = TREE_OPERAND (tree11, 0);
10562 tree111 = TREE_OPERAND (tree11, 1);
10563 STRIP_NOPS (tree110);
10564 STRIP_NOPS (tree111);
10565 if (TREE_CODE (tree110) == INTEGER_CST
10566 && 0 == compare_tree_int (tree110,
10567 element_precision
10568 (TREE_TYPE (TREE_OPERAND
10569 (arg0, 0))))
10570 && operand_equal_p (tree01, tree111, 0))
10571 return
10572 fold_convert_loc (loc, type,
10573 build2 ((code0 == LSHIFT_EXPR
10574 ? LROTATE_EXPR
10575 : RROTATE_EXPR),
10576 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10577 TREE_OPERAND (arg0, 0), tree01));
10578 }
10579 else if (code01 == MINUS_EXPR)
10580 {
10581 tree tree010, tree011;
10582 tree010 = TREE_OPERAND (tree01, 0);
10583 tree011 = TREE_OPERAND (tree01, 1);
10584 STRIP_NOPS (tree010);
10585 STRIP_NOPS (tree011);
10586 if (TREE_CODE (tree010) == INTEGER_CST
10587 && 0 == compare_tree_int (tree010,
10588 element_precision
10589 (TREE_TYPE (TREE_OPERAND
10590 (arg0, 0))))
10591 && operand_equal_p (tree11, tree011, 0))
10592 return fold_convert_loc
10593 (loc, type,
10594 build2 ((code0 != LSHIFT_EXPR
10595 ? LROTATE_EXPR
10596 : RROTATE_EXPR),
10597 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10598 TREE_OPERAND (arg0, 0), tree11));
10599 }
10600 }
10601 }
10602
10603 associate:
10604 /* In most languages, we can't associate operations on floats through
10605 parentheses. Rather than remember where the parentheses were, we
10606 don't associate floats at all, unless the user has specified
10607 -fassociative-math.
10608 We also need to make sure the type is not saturating. */
10609
10610 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10611 && !TYPE_SATURATING (type))
10612 {
10613 tree var0, con0, lit0, minus_lit0;
10614 tree var1, con1, lit1, minus_lit1;
10615 tree atype = type;
10616 bool ok = true;
10617
10618 /* Split both trees into variables, constants, and literals. Then
10619 associate each group together, the constants with literals,
10620 then the result with variables. This increases the chances of
10621 literals being recombined later and of generating relocatable
10622 expressions for the sum of a constant and literal. */
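/* For example, given (x + 1) + 2, split_tree yields the variable x
and the literals 1 and 2; the literals are associated first,
producing x + 3. */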
10623 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10624 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10625 code == MINUS_EXPR);
10626
10627 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10628 if (code == MINUS_EXPR)
10629 code = PLUS_EXPR;
10630
10631 /* With undefined overflow prefer doing association in a type
10632 which wraps on overflow, if that is one of the operand types. */
10633 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10634 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10635 {
10636 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10637 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10638 atype = TREE_TYPE (arg0);
10639 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10640 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10641 atype = TREE_TYPE (arg1);
10642 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10643 }
10644
10645 /* With undefined overflow we can only associate constants with one
10646 variable, and constants whose association doesn't overflow. */
10647 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10648 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10649 {
10650 if (var0 && var1)
10651 {
10652 tree tmp0 = var0;
10653 tree tmp1 = var1;
10654
10655 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10656 tmp0 = TREE_OPERAND (tmp0, 0);
10657 if (CONVERT_EXPR_P (tmp0)
10658 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10659 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10660 <= TYPE_PRECISION (atype)))
10661 tmp0 = TREE_OPERAND (tmp0, 0);
10662 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10663 tmp1 = TREE_OPERAND (tmp1, 0);
10664 if (CONVERT_EXPR_P (tmp1)
10665 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10666 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10667 <= TYPE_PRECISION (atype)))
10668 tmp1 = TREE_OPERAND (tmp1, 0);
10669 /* The only case we can still associate with two variables
10670 is if they are the same, modulo negation and bit-pattern
10671 preserving conversions. */
10672 if (!operand_equal_p (tmp0, tmp1, 0))
10673 ok = false;
10674 }
10675 }
10676
10677 /* Only do something if we found more than two objects. Otherwise,
10678 nothing has changed and we risk infinite recursion. */
10679 if (ok
10680 && (2 < ((var0 != 0) + (var1 != 0)
10681 + (con0 != 0) + (con1 != 0)
10682 + (lit0 != 0) + (lit1 != 0)
10683 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10684 {
10685 bool any_overflows = false;
10686 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10687 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10688 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10689 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10690 var0 = associate_trees (loc, var0, var1, code, atype);
10691 con0 = associate_trees (loc, con0, con1, code, atype);
10692 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10693 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10694 code, atype);
10695
10696 /* Preserve the MINUS_EXPR if the negative part of the literal is
10697 greater than the positive part. Otherwise, the multiplicative
10698 folding code (i.e. extract_muldiv) may be fooled when
10699 unsigned constants are subtracted, as in the following
10700 example: ((X*2 + 4) - 8U)/2. */
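/* There the literals combine to a subtracted 4, giving
((X*2) - 4)/2 == X - 2; wrapping 4 - 8U into a single huge unsigned
addend instead could let the division be distributed incorrectly. */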
10701 if (minus_lit0 && lit0)
10702 {
10703 if (TREE_CODE (lit0) == INTEGER_CST
10704 && TREE_CODE (minus_lit0) == INTEGER_CST
10705 && tree_int_cst_lt (lit0, minus_lit0))
10706 {
10707 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10708 MINUS_EXPR, atype);
10709 lit0 = 0;
10710 }
10711 else
10712 {
10713 lit0 = associate_trees (loc, lit0, minus_lit0,
10714 MINUS_EXPR, atype);
10715 minus_lit0 = 0;
10716 }
10717 }
10718
10719 /* Don't introduce overflows through reassociation. */
10720 if (!any_overflows
10721 && ((lit0 && TREE_OVERFLOW (lit0))
10722 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10723 return NULL_TREE;
10724
10725 if (minus_lit0)
10726 {
10727 if (con0 == 0)
10728 return
10729 fold_convert_loc (loc, type,
10730 associate_trees (loc, var0, minus_lit0,
10731 MINUS_EXPR, atype));
10732 else
10733 {
10734 con0 = associate_trees (loc, con0, minus_lit0,
10735 MINUS_EXPR, atype);
10736 return
10737 fold_convert_loc (loc, type,
10738 associate_trees (loc, var0, con0,
10739 PLUS_EXPR, atype));
10740 }
10741 }
10742
10743 con0 = associate_trees (loc, con0, lit0, code, atype);
10744 return
10745 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10746 code, atype));
10747 }
10748 }
10749
10750 return NULL_TREE;
10751
10752 case MINUS_EXPR:
10753 /* Pointer simplifications for subtraction, simple reassociations. */
10754 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10755 {
10756 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10757 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10758 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10759 {
10760 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10761 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10762 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10763 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10764 return fold_build2_loc (loc, PLUS_EXPR, type,
10765 fold_build2_loc (loc, MINUS_EXPR, type,
10766 arg00, arg10),
10767 fold_build2_loc (loc, MINUS_EXPR, type,
10768 arg01, arg11));
10769 }
10770 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10771 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10772 {
10773 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10774 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10775 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10776 fold_convert_loc (loc, type, arg1));
10777 if (tmp)
10778 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10779 }
10780 }
10781 /* A - (-B) -> A + B */
10782 if (TREE_CODE (arg1) == NEGATE_EXPR)
10783 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10784 fold_convert_loc (loc, type,
10785 TREE_OPERAND (arg1, 0)));
10786 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10787 if (TREE_CODE (arg0) == NEGATE_EXPR
10788 && negate_expr_p (arg1)
10789 && reorder_operands_p (arg0, arg1))
10790 return fold_build2_loc (loc, MINUS_EXPR, type,
10791 fold_convert_loc (loc, type,
10792 negate_expr (arg1)),
10793 fold_convert_loc (loc, type,
10794 TREE_OPERAND (arg0, 0)));
10795 /* Convert -A - 1 to ~A. */
10796 if (TREE_CODE (type) != COMPLEX_TYPE
10797 && TREE_CODE (arg0) == NEGATE_EXPR
10798 && integer_onep (arg1)
10799 && !TYPE_OVERFLOW_TRAPS (type))
10800 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10801 fold_convert_loc (loc, type,
10802 TREE_OPERAND (arg0, 0)));
10803
10804 /* Convert -1 - A to ~A. */
10805 if (TREE_CODE (type) != COMPLEX_TYPE
10806 && integer_all_onesp (arg0))
10807 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10808
10809
10810 /* X - (X / Y) * Y is X % Y. */
10811 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10812 && TREE_CODE (arg1) == MULT_EXPR
10813 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10814 && operand_equal_p (arg0,
10815 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10816 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10817 TREE_OPERAND (arg1, 1), 0))
10818 return
10819 fold_convert_loc (loc, type,
10820 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10821 arg0, TREE_OPERAND (arg1, 1)));
10822
10823 if (! FLOAT_TYPE_P (type))
10824 {
10825 if (integer_zerop (arg0))
10826 return negate_expr (fold_convert_loc (loc, type, arg1));
10827 if (integer_zerop (arg1))
10828 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10829
10830 /* Fold A - (A & B) into ~B & A. */
10831 if (!TREE_SIDE_EFFECTS (arg0)
10832 && TREE_CODE (arg1) == BIT_AND_EXPR)
10833 {
10834 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10835 {
10836 tree arg10 = fold_convert_loc (loc, type,
10837 TREE_OPERAND (arg1, 0));
10838 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10839 fold_build1_loc (loc, BIT_NOT_EXPR,
10840 type, arg10),
10841 fold_convert_loc (loc, type, arg0));
10842 }
10843 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10844 {
10845 tree arg11 = fold_convert_loc (loc,
10846 type, TREE_OPERAND (arg1, 1));
10847 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10848 fold_build1_loc (loc, BIT_NOT_EXPR,
10849 type, arg11),
10850 fold_convert_loc (loc, type, arg0));
10851 }
10852 }
10853
10854 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10855 any power of 2 minus 1. */
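/* For example, with B == 7: (A & ~7) - (A & 7) == (A ^ 7) - 7,
since XOR with 7 flips exactly the low three bits. */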
10856 if (TREE_CODE (arg0) == BIT_AND_EXPR
10857 && TREE_CODE (arg1) == BIT_AND_EXPR
10858 && operand_equal_p (TREE_OPERAND (arg0, 0),
10859 TREE_OPERAND (arg1, 0), 0))
10860 {
10861 tree mask0 = TREE_OPERAND (arg0, 1);
10862 tree mask1 = TREE_OPERAND (arg1, 1);
10863 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10864
10865 if (operand_equal_p (tem, mask1, 0))
10866 {
10867 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10868 TREE_OPERAND (arg0, 0), mask1);
10869 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10870 }
10871 }
10872 }
10873
10874 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10875 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10876 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10877
10878 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10879 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10880 (-ARG1 + ARG0) reduces to -ARG1. */
10881 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10882 return negate_expr (fold_convert_loc (loc, type, arg1));
10883
10884 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10885 __complex__ ( x, -y ). This is not the same for SNaNs or if
10886 signed zeros are involved. */
10887 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10888 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10889 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10890 {
10891 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10892 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10893 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10894 bool arg0rz = false, arg0iz = false;
10895 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10896 || (arg0i && (arg0iz = real_zerop (arg0i))))
10897 {
10898 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10899 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10900 if (arg0rz && arg1i && real_zerop (arg1i))
10901 {
10902 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10903 arg1r ? arg1r
10904 : build1 (REALPART_EXPR, rtype, arg1));
10905 tree ip = arg0i ? arg0i
10906 : build1 (IMAGPART_EXPR, rtype, arg0);
10907 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10908 }
10909 else if (arg0iz && arg1r && real_zerop (arg1r))
10910 {
10911 tree rp = arg0r ? arg0r
10912 : build1 (REALPART_EXPR, rtype, arg0);
10913 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10914 arg1i ? arg1i
10915 : build1 (IMAGPART_EXPR, rtype, arg1));
10916 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10917 }
10918 }
10919 }
10920
10921 /* Fold &x - &x. This can happen from &x.foo - &x.
10922 This is unsafe for certain floats even in non-IEEE formats.
10923 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10924 Also note that operand_equal_p is always false if an operand
10925 is volatile. */
10926
10927 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10928 && operand_equal_p (arg0, arg1, 0))
10929 return build_zero_cst (type);
10930
10931 /* A - B -> A + (-B) if B is easily negatable. */
10932 if (negate_expr_p (arg1)
10933 && ((FLOAT_TYPE_P (type)
10934 /* Avoid this transformation if B is a positive REAL_CST. */
10935 && (TREE_CODE (arg1) != REAL_CST
10936 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10937 || INTEGRAL_TYPE_P (type)))
10938 return fold_build2_loc (loc, PLUS_EXPR, type,
10939 fold_convert_loc (loc, type, arg0),
10940 fold_convert_loc (loc, type,
10941 negate_expr (arg1)));
10942
10943 /* Try folding difference of addresses. */
10944 {
10945 HOST_WIDE_INT diff;
10946
10947 if ((TREE_CODE (arg0) == ADDR_EXPR
10948 || TREE_CODE (arg1) == ADDR_EXPR)
10949 && ptr_difference_const (arg0, arg1, &diff))
10950 return build_int_cst_type (type, diff);
10951 }
10952
10953 /* Fold &a[i] - &a[j] to i-j. */
10954 if (TREE_CODE (arg0) == ADDR_EXPR
10955 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10956 && TREE_CODE (arg1) == ADDR_EXPR
10957 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10958 {
10959 tree tem = fold_addr_of_array_ref_difference (loc, type,
10960 TREE_OPERAND (arg0, 0),
10961 TREE_OPERAND (arg1, 0));
10962 if (tem)
10963 return tem;
10964 }
10965
10966 if (FLOAT_TYPE_P (type)
10967 && flag_unsafe_math_optimizations
10968 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10969 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10970 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10971 return tem;
10972
10973 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10974 one. Make sure the type is not saturating and has the signedness of
10975 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10976 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10977 if ((TREE_CODE (arg0) == MULT_EXPR
10978 || TREE_CODE (arg1) == MULT_EXPR)
10979 && !TYPE_SATURATING (type)
10980 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10981 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10982 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10983 {
10984 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10985 if (tem)
10986 return tem;
10987 }
10988
10989 goto associate;
10990
10991 case MULT_EXPR:
10992 /* (-A) * (-B) -> A * B */
10993 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10994 return fold_build2_loc (loc, MULT_EXPR, type,
10995 fold_convert_loc (loc, type,
10996 TREE_OPERAND (arg0, 0)),
10997 fold_convert_loc (loc, type,
10998 negate_expr (arg1)));
10999 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11000 return fold_build2_loc (loc, MULT_EXPR, type,
11001 fold_convert_loc (loc, type,
11002 negate_expr (arg0)),
11003 fold_convert_loc (loc, type,
11004 TREE_OPERAND (arg1, 0)));
11005
11006 if (! FLOAT_TYPE_P (type))
11007 {
11008 if (integer_zerop (arg1))
11009 return omit_one_operand_loc (loc, type, arg1, arg0);
11010 if (integer_onep (arg1))
11011 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11012 /* Transform x * -1 into -x. Make sure to do the negation
11013 on the original operand with conversions not stripped
11014 because we can only strip non-sign-changing conversions. */
11015 if (integer_minus_onep (arg1))
11016 return fold_convert_loc (loc, type, negate_expr (op0));
11017 /* Transform x * -C into -x * C if x is easily negatable. */
11018 if (TREE_CODE (arg1) == INTEGER_CST
11019 && tree_int_cst_sgn (arg1) == -1
11020 && negate_expr_p (arg0)
11021 && (tem = negate_expr (arg1)) != arg1
11022 && !TREE_OVERFLOW (tem))
11023 return fold_build2_loc (loc, MULT_EXPR, type,
11024 fold_convert_loc (loc, type,
11025 negate_expr (arg0)),
11026 tem);
11027
11028 /* (a * (1 << b)) is (a << b) */
11029 if (TREE_CODE (arg1) == LSHIFT_EXPR
11030 && integer_onep (TREE_OPERAND (arg1, 0)))
11031 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11032 TREE_OPERAND (arg1, 1));
11033 if (TREE_CODE (arg0) == LSHIFT_EXPR
11034 && integer_onep (TREE_OPERAND (arg0, 0)))
11035 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11036 TREE_OPERAND (arg0, 1));
11037
11038 /* (A + A) * C -> A * 2 * C */
11039 if (TREE_CODE (arg0) == PLUS_EXPR
11040 && TREE_CODE (arg1) == INTEGER_CST
11041 && operand_equal_p (TREE_OPERAND (arg0, 0),
11042 TREE_OPERAND (arg0, 1), 0))
11043 return fold_build2_loc (loc, MULT_EXPR, type,
11044 omit_one_operand_loc (loc, type,
11045 TREE_OPERAND (arg0, 0),
11046 TREE_OPERAND (arg0, 1)),
11047 fold_build2_loc (loc, MULT_EXPR, type,
11048 build_int_cst (type, 2), arg1));
11049
11050 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11051 sign-changing only. */
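/* /[ex] asserts that the division is exact, so for instance
((unsigned) (X /[ex] 4)) * 4 is just (unsigned) X. */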
11052 if (TREE_CODE (arg1) == INTEGER_CST
11053 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11054 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11055 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11056
11057 strict_overflow_p = false;
11058 if (TREE_CODE (arg1) == INTEGER_CST
11059 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11060 &strict_overflow_p)))
11061 {
11062 if (strict_overflow_p)
11063 fold_overflow_warning (("assuming signed overflow does not "
11064 "occur when simplifying "
11065 "multiplication"),
11066 WARN_STRICT_OVERFLOW_MISC);
11067 return fold_convert_loc (loc, type, tem);
11068 }
11069
11070 /* Optimize z * conj(z) for integer complex numbers. */
11071 if (TREE_CODE (arg0) == CONJ_EXPR
11072 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11073 return fold_mult_zconjz (loc, type, arg1);
11074 if (TREE_CODE (arg1) == CONJ_EXPR
11075 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11076 return fold_mult_zconjz (loc, type, arg0);
11077 }
11078 else
11079 {
11080 /* Maybe fold x * 0 to 0. The expressions aren't the same
11081 when x is NaN, since x * 0 is also NaN. Nor are they the
11082 same in modes with signed zeros, since multiplying a
11083 negative value by 0 gives -0, not +0. */
11084 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11085 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11086 && real_zerop (arg1))
11087 return omit_one_operand_loc (loc, type, arg1, arg0);
11088 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11089 Likewise for complex arithmetic with signed zeros. */
11090 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11091 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11092 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11093 && real_onep (arg1))
11094 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11095
11096 /* Transform x * -1.0 into -x. */
11097 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11098 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11099 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11100 && real_minus_onep (arg1))
11101 return fold_convert_loc (loc, type, negate_expr (arg0));
11102
11103 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11104 the result for floating-point types due to rounding, so it is applied
11105 only if -fassociative-math was specified. */
11106 if (flag_associative_math
11107 && TREE_CODE (arg0) == RDIV_EXPR
11108 && TREE_CODE (arg1) == REAL_CST
11109 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11110 {
11111 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11112 arg1);
11113 if (tem)
11114 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11115 TREE_OPERAND (arg0, 1));
11116 }
11117
11118 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11119 if (operand_equal_p (arg0, arg1, 0))
11120 {
11121 tree tem = fold_strip_sign_ops (arg0);
11122 if (tem != NULL_TREE)
11123 {
11124 tem = fold_convert_loc (loc, type, tem);
11125 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11126 }
11127 }
11128
11129 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11130 This is not the same for NaNs or if signed zeros are
11131 involved. */
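/* E.g. (a + bi) * I == -b + ai, a 90-degree rotation in the
complex plane. */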
11132 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11133 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11134 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11135 && TREE_CODE (arg1) == COMPLEX_CST
11136 && real_zerop (TREE_REALPART (arg1)))
11137 {
11138 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11139 if (real_onep (TREE_IMAGPART (arg1)))
11140 return
11141 fold_build2_loc (loc, COMPLEX_EXPR, type,
11142 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11143 rtype, arg0)),
11144 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11145 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11146 return
11147 fold_build2_loc (loc, COMPLEX_EXPR, type,
11148 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11149 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11150 rtype, arg0)));
11151 }
11152
11153 /* Optimize z * conj(z) for floating point complex numbers.
11154 Guarded by flag_unsafe_math_optimizations as non-finite
11155 imaginary components don't produce scalar results. */
11156 if (flag_unsafe_math_optimizations
11157 && TREE_CODE (arg0) == CONJ_EXPR
11158 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11159 return fold_mult_zconjz (loc, type, arg1);
11160 if (flag_unsafe_math_optimizations
11161 && TREE_CODE (arg1) == CONJ_EXPR
11162 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11163 return fold_mult_zconjz (loc, type, arg0);
11164
11165 if (flag_unsafe_math_optimizations)
11166 {
11167 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11168 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11169
11170 /* Optimizations of root(...)*root(...). */
11171 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11172 {
11173 tree rootfn, arg;
11174 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11175 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11176
11177 /* Optimize sqrt(x)*sqrt(x) as x. */
11178 if (BUILTIN_SQRT_P (fcode0)
11179 && operand_equal_p (arg00, arg10, 0)
11180 && ! HONOR_SNANS (TYPE_MODE (type)))
11181 return arg00;
11182
11183 /* Optimize root(x)*root(y) as root(x*y). */
11184 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11185 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11186 return build_call_expr_loc (loc, rootfn, 1, arg);
11187 }
11188
11189 /* Optimize expN(x)*expN(y) as expN(x+y). */
11190 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11191 {
11192 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11193 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11194 CALL_EXPR_ARG (arg0, 0),
11195 CALL_EXPR_ARG (arg1, 0));
11196 return build_call_expr_loc (loc, expfn, 1, arg);
11197 }
11198
11199 /* Optimizations of pow(...)*pow(...). */
11200 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11201 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11202 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11203 {
11204 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11205 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11206 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11207 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11208
11209 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11210 if (operand_equal_p (arg01, arg11, 0))
11211 {
11212 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11213 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11214 arg00, arg10);
11215 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11216 }
11217
11218 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11219 if (operand_equal_p (arg00, arg10, 0))
11220 {
11221 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11222 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11223 arg01, arg11);
11224 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11225 }
11226 }
11227
11228 /* Optimize tan(x)*cos(x) as sin(x). */
11229 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11230 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11231 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11232 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11233 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11234 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11235 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11236 CALL_EXPR_ARG (arg1, 0), 0))
11237 {
11238 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11239
11240 if (sinfn != NULL_TREE)
11241 return build_call_expr_loc (loc, sinfn, 1,
11242 CALL_EXPR_ARG (arg0, 0));
11243 }
11244
11245 /* Optimize x*pow(x,c) as pow(x,c+1). */
11246 if (fcode1 == BUILT_IN_POW
11247 || fcode1 == BUILT_IN_POWF
11248 || fcode1 == BUILT_IN_POWL)
11249 {
11250 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11251 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11252 if (TREE_CODE (arg11) == REAL_CST
11253 && !TREE_OVERFLOW (arg11)
11254 && operand_equal_p (arg0, arg10, 0))
11255 {
11256 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11257 REAL_VALUE_TYPE c;
11258 tree arg;
11259
11260 c = TREE_REAL_CST (arg11);
11261 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11262 arg = build_real (type, c);
11263 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11264 }
11265 }
11266
11267 /* Optimize pow(x,c)*x as pow(x,c+1). */
11268 if (fcode0 == BUILT_IN_POW
11269 || fcode0 == BUILT_IN_POWF
11270 || fcode0 == BUILT_IN_POWL)
11271 {
11272 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11273 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11274 if (TREE_CODE (arg01) == REAL_CST
11275 && !TREE_OVERFLOW (arg01)
11276 && operand_equal_p (arg1, arg00, 0))
11277 {
11278 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11279 REAL_VALUE_TYPE c;
11280 tree arg;
11281
11282 c = TREE_REAL_CST (arg01);
11283 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11284 arg = build_real (type, c);
11285 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11286 }
11287 }
11288
11289 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11290 if (!in_gimple_form
11291 && optimize
11292 && operand_equal_p (arg0, arg1, 0))
11293 {
11294 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11295
11296 if (powfn)
11297 {
11298 tree arg = build_real (type, dconst2);
11299 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11300 }
11301 }
11302 }
11303 }
11304 goto associate;
11305
11306 case BIT_IOR_EXPR:
11307 bit_ior:
11308 if (integer_all_onesp (arg1))
11309 return omit_one_operand_loc (loc, type, arg1, arg0);
11310 if (integer_zerop (arg1))
11311 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11312 if (operand_equal_p (arg0, arg1, 0))
11313 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11314
11315 /* ~X | X is -1. */
11316 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11317 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11318 {
11319 t1 = build_zero_cst (type);
11320 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11321 return omit_one_operand_loc (loc, type, t1, arg1);
11322 }
11323
11324 /* X | ~X is -1. */
11325 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11326 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11327 {
11328 t1 = build_zero_cst (type);
11329 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11330 return omit_one_operand_loc (loc, type, t1, arg0);
11331 }
11332
11333 /* Canonicalize (X & C1) | C2. */
11334 if (TREE_CODE (arg0) == BIT_AND_EXPR
11335 && TREE_CODE (arg1) == INTEGER_CST
11336 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11337 {
11338 int width = TYPE_PRECISION (type), w;
11339 bool try_simplify = true;
11340 wide_int c1 = TREE_OPERAND (arg0, 1);
11341 wide_int c2 = arg1;
11342
11343 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
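/* E.g. (X & 0x0F) | 0xFF: every bit kept by C1 is already set in
C2, so only C2 (plus X for its side effects) remains. */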
11344 if ((c1 & c2) == c1)
11345 return omit_one_operand_loc (loc, type, arg1,
11346 TREE_OPERAND (arg0, 0));
11347
11348 wide_int msk = wi::mask (width, false,
11349 TYPE_PRECISION (TREE_TYPE (arg1)));
11350
11351 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11352 if (msk.and_not (c1 | c2) == 0)
11353 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11354 TREE_OPERAND (arg0, 0), arg1);
11355
11356 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11357 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11358 mode which allows further optimizations. */
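/* E.g. (X & 0x3F) | 0x30 becomes (X & 0x0F) | 0x30, while
(X & 0xFF) | 0xF0 is left alone because 0xFF is already a
byte-sized mask. */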
11359 c1 &= msk;
11360 c2 &= msk;
11361 wide_int c3 = c1.and_not (c2);
11362 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11363 {
11364 wide_int mask = wi::mask (width - w, false,
11365 TYPE_PRECISION (type));
11366 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11367 {
11368 c3 = mask;
11369 break;
11370 }
11371 }
11372
11373 /* If X is a tree of the form (Y * K1) & K2, this might conflict
11374 with the corresponding optimization in the BIT_AND_EXPR case,
11375 which could end up in an infinite recursion. */
11376 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
11377 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11378 == INTEGER_CST)
11379 {
11380 tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11381 wide_int masked = mask_with_tz (type, c3, t);
11382
11383 try_simplify = (masked != c1);
11384 }
11385
11386 if (try_simplify && c3 != c1)
11387 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11388 fold_build2_loc (loc, BIT_AND_EXPR, type,
11389 TREE_OPERAND (arg0, 0),
11390 wide_int_to_tree (type,
11391 c3)),
11392 arg1);
11393 }
11394
11395 /* (X & Y) | Y is (X, Y). */
11396 if (TREE_CODE (arg0) == BIT_AND_EXPR
11397 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11398 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11399 /* (X & Y) | X is (Y, X). */
11400 if (TREE_CODE (arg0) == BIT_AND_EXPR
11401 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11402 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11403 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11404 /* X | (X & Y) is (Y, X). */
11405 if (TREE_CODE (arg1) == BIT_AND_EXPR
11406 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11407 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11408 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11409 /* X | (Y & X) is (Y, X). */
11410 if (TREE_CODE (arg1) == BIT_AND_EXPR
11411 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11412 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11413 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11414
11415 /* (X & ~Y) | (~X & Y) is X ^ Y. */
11416 if (TREE_CODE (arg0) == BIT_AND_EXPR
11417 && TREE_CODE (arg1) == BIT_AND_EXPR)
11418 {
11419 tree a0, a1, l0, l1, n0, n1;
11420
11421 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11422 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11423
11424 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11425 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11426
11427 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11428 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11429
11430 if ((operand_equal_p (n0, a0, 0)
11431 && operand_equal_p (n1, a1, 0))
11432 || (operand_equal_p (n0, a1, 0)
11433 && operand_equal_p (n1, a0, 0)))
11434 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11435 }
11436
11437 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11438 if (t1 != NULL_TREE)
11439 return t1;
11440
11441 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11442
11443 This results in more efficient code for machines without a NAND
11444 instruction. Combine will canonicalize to the first form
11445 which will allow use of NAND instructions provided by the
11446 backend if they exist. */
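/* This is De Morgan's law: ~A | ~B == ~(A & B). */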
11447 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11448 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11449 {
11450 return
11451 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11452 build2 (BIT_AND_EXPR, type,
11453 fold_convert_loc (loc, type,
11454 TREE_OPERAND (arg0, 0)),
11455 fold_convert_loc (loc, type,
11456 TREE_OPERAND (arg1, 0))));
11457 }
11458
11459 /* See if this can be simplified into a rotate first. If that
11460 is unsuccessful, continue in the association code. */
11461 goto bit_rotate;
11462
11463 case BIT_XOR_EXPR:
11464 if (integer_zerop (arg1))
11465 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11466 if (integer_all_onesp (arg1))
11467 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11468 if (operand_equal_p (arg0, arg1, 0))
11469 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11470
11471 /* ~X ^ X is -1. */
11472 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11473 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11474 {
11475 t1 = build_zero_cst (type);
11476 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11477 return omit_one_operand_loc (loc, type, t1, arg1);
11478 }
11479
11480 /* X ^ ~X is -1. */
11481 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11482 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11483 {
11484 t1 = build_zero_cst (type);
11485 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11486 return omit_one_operand_loc (loc, type, t1, arg0);
11487 }
11488
11489 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11490 with a constant, and the two constants have no bits in common,
11491 we should treat this as a BIT_IOR_EXPR since this may produce more
11492 simplifications. */
11493 if (TREE_CODE (arg0) == BIT_AND_EXPR
11494 && TREE_CODE (arg1) == BIT_AND_EXPR
11495 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11496 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11497 && integer_zerop (const_binop (BIT_AND_EXPR,
11498 TREE_OPERAND (arg0, 1),
11499 TREE_OPERAND (arg1, 1))))
11500 {
11501 code = BIT_IOR_EXPR;
11502 goto bit_ior;
11503 }
11504
11505 /* (X | Y) ^ X -> Y & ~X. */
11506 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11507 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11508 {
11509 tree t2 = TREE_OPERAND (arg0, 1);
11510 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11511 arg1);
11512 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11513 fold_convert_loc (loc, type, t2),
11514 fold_convert_loc (loc, type, t1));
11515 return t1;
11516 }
11517
11518 /* (Y | X) ^ X -> Y & ~X. */
11519 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11520 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11521 {
11522 tree t2 = TREE_OPERAND (arg0, 0);
11523 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11524 arg1);
11525 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11526 fold_convert_loc (loc, type, t2),
11527 fold_convert_loc (loc, type, t1));
11528 return t1;
11529 }
11530
11531 /* X ^ (X | Y) -> Y & ~X. */
11532 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11533 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11534 {
11535 tree t2 = TREE_OPERAND (arg1, 1);
11536 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11537 arg0);
11538 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11539 fold_convert_loc (loc, type, t2),
11540 fold_convert_loc (loc, type, t1));
11541 return t1;
11542 }
11543
11544 /* X ^ (Y | X) -> Y & ~X. */
11545 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11546 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11547 {
11548 tree t2 = TREE_OPERAND (arg1, 0);
11549 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11550 arg0);
11551 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11552 fold_convert_loc (loc, type, t2),
11553 fold_convert_loc (loc, type, t1));
11554 return t1;
11555 }
11556
11557 /* Convert ~X ^ ~Y to X ^ Y. */
11558 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11559 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11560 return fold_build2_loc (loc, code, type,
11561 fold_convert_loc (loc, type,
11562 TREE_OPERAND (arg0, 0)),
11563 fold_convert_loc (loc, type,
11564 TREE_OPERAND (arg1, 0)));
11565
11566 /* Convert ~X ^ C to X ^ ~C. */
11567 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11568 && TREE_CODE (arg1) == INTEGER_CST)
11569 return fold_build2_loc (loc, code, type,
11570 fold_convert_loc (loc, type,
11571 TREE_OPERAND (arg0, 0)),
11572 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11573
11574 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11575 if (TREE_CODE (arg0) == BIT_AND_EXPR
11576 && integer_onep (TREE_OPERAND (arg0, 1))
11577 && integer_onep (arg1))
11578 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11579 build_zero_cst (TREE_TYPE (arg0)));
11580
11581 /* Fold (X & Y) ^ Y as ~X & Y. */
11582 if (TREE_CODE (arg0) == BIT_AND_EXPR
11583 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11584 {
11585 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11586 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11587 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11588 fold_convert_loc (loc, type, arg1));
11589 }
11590 /* Fold (X & Y) ^ X as ~Y & X. */
11591 if (TREE_CODE (arg0) == BIT_AND_EXPR
11592 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11593 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11594 {
11595 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11596 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11597 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11598 fold_convert_loc (loc, type, arg1));
11599 }
11600 /* Fold X ^ (X & Y) as X & ~Y. */
11601 if (TREE_CODE (arg1) == BIT_AND_EXPR
11602 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11603 {
11604 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11605 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11606 fold_convert_loc (loc, type, arg0),
11607 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11608 }
11609 /* Fold X ^ (Y & X) as ~Y & X. */
11610 if (TREE_CODE (arg1) == BIT_AND_EXPR
11611 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11612 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11613 {
11614 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11615 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11616 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11617 fold_convert_loc (loc, type, arg0));
11618 }
11619
11620 /* See if this can be simplified into a rotate first. If that
11621 is unsuccessful, continue in the association code. */
11622 goto bit_rotate;
11623
11624 case BIT_AND_EXPR:
11625 if (integer_all_onesp (arg1))
11626 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11627 if (integer_zerop (arg1))
11628 return omit_one_operand_loc (loc, type, arg1, arg0);
11629 if (operand_equal_p (arg0, arg1, 0))
11630 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11631
11632 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11633 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11634 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11635 || (TREE_CODE (arg0) == EQ_EXPR
11636 && integer_zerop (TREE_OPERAND (arg0, 1))))
11637 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11638 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11639
11640 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11641 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11642 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11643 || (TREE_CODE (arg1) == EQ_EXPR
11644 && integer_zerop (TREE_OPERAND (arg1, 1))))
11645 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11646 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11647
11648 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11649 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11650 && TREE_CODE (arg1) == INTEGER_CST
11651 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11652 {
11653 tree tmp1 = fold_convert_loc (loc, type, arg1);
11654 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11655 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11656 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11657 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11658 return
11659 fold_convert_loc (loc, type,
11660 fold_build2_loc (loc, BIT_IOR_EXPR,
11661 type, tmp2, tmp3));
11662 }
11663
11664 /* (X | Y) & Y is (X, Y). */
11665 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11666 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11667 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11668 /* (X | Y) & X is (Y, X). */
11669 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11670 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11671 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11672 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11673 /* X & (X | Y) is (Y, X). */
11674 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11675 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11676 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11677 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11678 /* X & (Y | X) is (Y, X). */
11679 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11680 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11681 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11682 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11683
11684 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11685 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11686 && integer_onep (TREE_OPERAND (arg0, 1))
11687 && integer_onep (arg1))
11688 {
11689 tree tem2;
11690 tem = TREE_OPERAND (arg0, 0);
11691 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11692 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11693 tem, tem2);
11694 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11695 build_zero_cst (TREE_TYPE (tem)));
11696 }
11697 /* Fold ~X & 1 as (X & 1) == 0. */
11698 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11699 && integer_onep (arg1))
11700 {
11701 tree tem2;
11702 tem = TREE_OPERAND (arg0, 0);
11703 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11704 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11705 tem, tem2);
11706 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11707 build_zero_cst (TREE_TYPE (tem)));
11708 }
11709 /* Fold !X & 1 as X == 0. */
11710 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11711 && integer_onep (arg1))
11712 {
11713 tem = TREE_OPERAND (arg0, 0);
11714 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11715 build_zero_cst (TREE_TYPE (tem)));
11716 }
11717
11718 /* Fold (X ^ Y) & Y as ~X & Y. */
11719 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11720 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11721 {
11722 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11723 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11724 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11725 fold_convert_loc (loc, type, arg1));
11726 }
11727 /* Fold (X ^ Y) & X as ~Y & X. */
11728 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11729 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11730 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11731 {
11732 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11733 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11734 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11735 fold_convert_loc (loc, type, arg1));
11736 }
11737 /* Fold X & (X ^ Y) as X & ~Y. */
11738 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11739 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11740 {
11741 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11742 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11743 fold_convert_loc (loc, type, arg0),
11744 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11745 }
11746 /* Fold X & (Y ^ X) as ~Y & X. */
11747 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11748 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11749 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11750 {
11751 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11752 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11753 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11754 fold_convert_loc (loc, type, arg0));
11755 }
11756
11757 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11758 multiple of 1 << CST. */
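/* E.g. (X * 8) & -4: X * 8 is always a multiple of 4, so the mask
clears no bits and the result is just X * 8. */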
11759 if (TREE_CODE (arg1) == INTEGER_CST)
11760 {
11761 wide_int cst1 = arg1;
11762 wide_int ncst1 = -cst1;
11763 if ((cst1 & ncst1) == ncst1
11764 && multiple_of_p (type, arg0,
11765 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11766 return fold_convert_loc (loc, type, arg0);
11767 }
11768
11769 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11770 bits from CST2. */
11771 if (TREE_CODE (arg1) == INTEGER_CST
11772 && TREE_CODE (arg0) == MULT_EXPR
11773 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11774 {
11775 wide_int masked = mask_with_tz (type, arg1, TREE_OPERAND (arg0, 1));
11776
11777 if (masked == 0)
11778 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11779 arg0, arg1);
11780 else if (masked != arg1)
11781 return fold_build2_loc (loc, code, type, op0,
11782 wide_int_to_tree (type, masked));
11783 }
11784
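          /* Worked example (illustrative): X * 8 always has its low three
             bits clear, so (X * 8) & 0x17 folds to (X * 8) & 0x10, and
             (X * 8) & 0x7 folds to 0 (keeping the operands for their
             side effects, if any).  */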
11785 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11786 ((A & N) + B) & M -> (A + B) & M
11787 Similarly if (N & M) == 0,
11788 ((A | N) + B) & M -> (A + B) & M
11789 and for - instead of + (or unary - instead of +)
11790 and/or ^ instead of |.
11791 If B is constant and (B & M) == 0, fold into A & M. */
11792 if (TREE_CODE (arg1) == INTEGER_CST)
11793 {
11794 wide_int cst1 = arg1;
11795 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11796 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11797 && (TREE_CODE (arg0) == PLUS_EXPR
11798 || TREE_CODE (arg0) == MINUS_EXPR
11799 || TREE_CODE (arg0) == NEGATE_EXPR)
11800 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11801 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11802 {
11803 tree pmop[2];
11804 int which = 0;
11805 wide_int cst0;
11806
11807 /* Now we know that arg0 is (C + D) or (C - D) or
11808 -C, and that arg1 (M) equals (1LL << cst) - 1.
11809 Store C into PMOP[0] and D into PMOP[1]. */
11810 pmop[0] = TREE_OPERAND (arg0, 0);
11811 pmop[1] = NULL;
11812 if (TREE_CODE (arg0) != NEGATE_EXPR)
11813 {
11814 pmop[1] = TREE_OPERAND (arg0, 1);
11815 which = 1;
11816 }
11817
11818 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11819 which = -1;
11820
11821 for (; which >= 0; which--)
11822 switch (TREE_CODE (pmop[which]))
11823 {
11824 case BIT_AND_EXPR:
11825 case BIT_IOR_EXPR:
11826 case BIT_XOR_EXPR:
11827 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11828 != INTEGER_CST)
11829 break;
11830 cst0 = TREE_OPERAND (pmop[which], 1);
11831 cst0 &= cst1;
11832 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11833 {
11834 if (cst0 != cst1)
11835 break;
11836 }
11837 else if (cst0 != 0)
11838 break;
11839 /* If C or D is of the form (A & N) where
11840 (N & M) == M, or of the form (A | N) or
11841 (A ^ N) where (N & M) == 0, replace it with A. */
11842 pmop[which] = TREE_OPERAND (pmop[which], 0);
11843 break;
11844 case INTEGER_CST:
11845 /* If C or D is a constant N where (N & M) == 0, it can be
11846 omitted (assumed 0). */
11847 if ((TREE_CODE (arg0) == PLUS_EXPR
11848 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11849 && (cst1 & pmop[which]) == 0)
11850 pmop[which] = NULL;
11851 break;
11852 default:
11853 break;
11854 }
11855
11856 /* Only build anything new if we optimized one or both arguments
11857 above. */
11858 if (pmop[0] != TREE_OPERAND (arg0, 0)
11859 || (TREE_CODE (arg0) != NEGATE_EXPR
11860 && pmop[1] != TREE_OPERAND (arg0, 1)))
11861 {
11862 tree utype = TREE_TYPE (arg0);
11863 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11864 {
11865 /* Perform the operations in a type that has defined
11866 overflow behavior. */
11867 utype = unsigned_type_for (TREE_TYPE (arg0));
11868 if (pmop[0] != NULL)
11869 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11870 if (pmop[1] != NULL)
11871 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11872 }
11873
11874 if (TREE_CODE (arg0) == NEGATE_EXPR)
11875 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11876 else if (TREE_CODE (arg0) == PLUS_EXPR)
11877 {
11878 if (pmop[0] != NULL && pmop[1] != NULL)
11879 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11880 pmop[0], pmop[1]);
11881 else if (pmop[0] != NULL)
11882 tem = pmop[0];
11883 else if (pmop[1] != NULL)
11884 tem = pmop[1];
11885 else
11886 return build_int_cst (type, 0);
11887 }
11888 else if (pmop[0] == NULL)
11889 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11890 else
11891 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11892 pmop[0], pmop[1]);
11893 /* TEM is now the new binary +, - or unary - replacement. */
11894 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11895 fold_convert_loc (loc, utype, arg1));
11896 return fold_convert_loc (loc, type, tem);
11897 }
11898 }
11899 }
11900
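          /* Worked example (illustrative): with M = 0xFF,
             ((A & 0x1FF) + B) & 0xFF -> (A + B) & 0xFF since
             0x1FF & 0xFF == 0xFF, and ((A | 0x100) + B) & 0xFF
             -> (A + B) & 0xFF since 0x100 & 0xFF == 0; bits above the
             mask cannot influence the masked sum, as carries only
             propagate upward.  */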
11901 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11902 if (t1 != NULL_TREE)
11903 return t1;
11904 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11905 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11906 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11907 {
11908 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11909
11910 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11911 if (mask == -1)
11912 return
11913 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11914 }
11915
11916 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11917
11918 This results in more efficient code for machines without a NOR
11919 instruction. Combine will canonicalize to the first form
11920 which will allow use of NOR instructions provided by the
11921 backend if they exist. */
11922 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11923 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11924 {
11925 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11926 build2 (BIT_IOR_EXPR, type,
11927 fold_convert_loc (loc, type,
11928 TREE_OPERAND (arg0, 0)),
11929 fold_convert_loc (loc, type,
11930 TREE_OPERAND (arg1, 0))));
11931 }
11932
11933 /* If arg0 is derived from the address of an object or function, we may
11934 be able to fold this expression using the object or function's
11935 alignment. */
11936 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11937 {
11938 unsigned HOST_WIDE_INT modulus, residue;
11939 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11940
11941 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11942 integer_onep (arg1));
11943
11944 /* This works because modulus is a power of 2. If this weren't the
11945 case, we'd have to replace it by its greatest power-of-2
11946 divisor: modulus & -modulus. */
11947 if (low < modulus)
11948 return build_int_cst (type, residue & low);
11949 }
11950
11951 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11952 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11953 if the new mask might be further optimized. */
11954 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11955 || TREE_CODE (arg0) == RSHIFT_EXPR)
11956 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11957 && TREE_CODE (arg1) == INTEGER_CST
11958 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11959 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11960 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11961 < TYPE_PRECISION (TREE_TYPE (arg0))))
11962 {
11963 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11964 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11965 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11966 tree shift_type = TREE_TYPE (arg0);
11967
11968 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11969 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11970 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11971 && TYPE_PRECISION (TREE_TYPE (arg0))
11972 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11973 {
11974 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11975 tree arg00 = TREE_OPERAND (arg0, 0);
11976 /* See if more bits can be proven as zero because of
11977 zero extension. */
11978 if (TREE_CODE (arg00) == NOP_EXPR
11979 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11980 {
11981 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11982 if (TYPE_PRECISION (inner_type)
11983 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11984 && TYPE_PRECISION (inner_type) < prec)
11985 {
11986 prec = TYPE_PRECISION (inner_type);
11987 /* See if we can shorten the right shift. */
11988 if (shiftc < prec)
11989 shift_type = inner_type;
11990 }
11991 }
11992 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11993 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11994 zerobits <<= prec - shiftc;
11995 /* For an arithmetic shift, if the sign bit could be set,
11996 zerobits can actually contain sign bits, so no transformation
11997 is possible unless MASK masks them all away. In that case
11998 the shift needs to be converted into a logical shift. */
11999 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12000 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12001 {
12002 if ((mask & zerobits) == 0)
12003 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12004 else
12005 zerobits = 0;
12006 }
12007 }
12008
12009 /* ((X << 16) & 0xff00) is (X, 0). */
12010 if ((mask & zerobits) == mask)
12011 return omit_one_operand_loc (loc, type,
12012 build_int_cst (type, 0), arg0);
12013
12014 newmask = mask | zerobits;
12015 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12016 {
12017 /* Only do the transformation if NEWMASK is some integer
12018 mode's mask. */
12019 for (prec = BITS_PER_UNIT;
12020 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12021 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12022 break;
12023 if (prec < HOST_BITS_PER_WIDE_INT
12024 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12025 {
12026 tree newmaskt;
12027
12028 if (shift_type != TREE_TYPE (arg0))
12029 {
12030 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12031 fold_convert_loc (loc, shift_type,
12032 TREE_OPERAND (arg0, 0)),
12033 TREE_OPERAND (arg0, 1));
12034 tem = fold_convert_loc (loc, type, tem);
12035 }
12036 else
12037 tem = op0;
12038 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12039 if (!tree_int_cst_equal (newmaskt, arg1))
12040 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12041 }
12042 }
12043 }
12044
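          /* Worked example (illustrative): in ((X << 8) & 0xFF00) the low
             eight bits are already zero, so the mask may be widened to
             0xFFFF; for a 16-bit X that is all ones and the AND can then
             be dropped.  ((X << 16) & 0xFF00) instead folds to (X, 0)
             above, as every mask bit is known to be zero.  */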
12045 goto associate;
12046
12047 case RDIV_EXPR:
12048 /* Don't touch a floating-point divide by zero unless the mode
12049 of the constant can represent infinity. */
12050 if (TREE_CODE (arg1) == REAL_CST
12051 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12052 && real_zerop (arg1))
12053 return NULL_TREE;
12054
12055 /* Optimize A / A to 1.0 if we don't care about
12056 NaNs or Infinities. Skip the transformation
12057 for non-real operands. */
12058 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12059 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12060 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12061 && operand_equal_p (arg0, arg1, 0))
12062 {
12063 tree r = build_real (TREE_TYPE (arg0), dconst1);
12064
12065 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12066 }
12067
12068 /* The complex version of the above A / A optimization. */
12069 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12070 && operand_equal_p (arg0, arg1, 0))
12071 {
12072 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12073 if (! HONOR_NANS (TYPE_MODE (elem_type))
12074 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12075 {
12076 tree r = build_real (elem_type, dconst1);
12077 /* omit_two_operands will call fold_convert for us. */
12078 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12079 }
12080 }
12081
12082 /* (-A) / (-B) -> A / B */
12083 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12084 return fold_build2_loc (loc, RDIV_EXPR, type,
12085 TREE_OPERAND (arg0, 0),
12086 negate_expr (arg1));
12087 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12088 return fold_build2_loc (loc, RDIV_EXPR, type,
12089 negate_expr (arg0),
12090 TREE_OPERAND (arg1, 0));
12091
12092 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12093 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12094 && real_onep (arg1))
12095 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12096
12097 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12098 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12099 && real_minus_onep (arg1))
12100 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12101 negate_expr (arg0)));
12102
12103 /* If ARG1 is a constant, we can convert this to a multiply by the
12104 reciprocal. This does not have the same rounding properties,
12105 so only do this if -freciprocal-math. We can actually
12106 always safely do it if ARG1 is a power of two, but it's hard to
12107 tell if it is or not in a portable manner. */
12108 if (optimize
12109 && (TREE_CODE (arg1) == REAL_CST
12110 || (TREE_CODE (arg1) == COMPLEX_CST
12111 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12112 || (TREE_CODE (arg1) == VECTOR_CST
12113 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12114 {
12115 if (flag_reciprocal_math
12116 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12117 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12118 /* Find the reciprocal if optimizing and the result is exact.
12119 TODO: Complex reciprocal not implemented. */
12120 if (TREE_CODE (arg1) != COMPLEX_CST)
12121 {
12122 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12123
12124 if (inverse)
12125 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12126 }
12127 }
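          /* Worked example (illustrative): under -freciprocal-math,
             X / 5.0 becomes X * 0.2 even though 0.2 rounds; without that
             flag only exact inverses are used, e.g. X / 4.0 becomes
             X * 0.25 because 0.25 is exactly representable.  */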
12128 /* Convert A/B/C to A/(B*C). */
12129 if (flag_reciprocal_math
12130 && TREE_CODE (arg0) == RDIV_EXPR)
12131 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12132 fold_build2_loc (loc, MULT_EXPR, type,
12133 TREE_OPERAND (arg0, 1), arg1));
12134
12135 /* Convert A/(B/C) to (A/B)*C. */
12136 if (flag_reciprocal_math
12137 && TREE_CODE (arg1) == RDIV_EXPR)
12138 return fold_build2_loc (loc, MULT_EXPR, type,
12139 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12140 TREE_OPERAND (arg1, 0)),
12141 TREE_OPERAND (arg1, 1));
12142
12143 /* Convert C1/(X*C2) into (C1/C2)/X. */
12144 if (flag_reciprocal_math
12145 && TREE_CODE (arg1) == MULT_EXPR
12146 && TREE_CODE (arg0) == REAL_CST
12147 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12148 {
12149 tree tem = const_binop (RDIV_EXPR, arg0,
12150 TREE_OPERAND (arg1, 1));
12151 if (tem)
12152 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12153 TREE_OPERAND (arg1, 0));
12154 }
12155
12156 if (flag_unsafe_math_optimizations)
12157 {
12158 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12159 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12160
12161 /* Optimize sin(x)/cos(x) as tan(x). */
12162 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12163 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12164 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12165 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12166 CALL_EXPR_ARG (arg1, 0), 0))
12167 {
12168 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12169
12170 if (tanfn != NULL_TREE)
12171 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12172 }
12173
12174 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12175 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12176 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12177 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12178 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12179 CALL_EXPR_ARG (arg1, 0), 0))
12180 {
12181 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12182
12183 if (tanfn != NULL_TREE)
12184 {
12185 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12186 CALL_EXPR_ARG (arg0, 0));
12187 return fold_build2_loc (loc, RDIV_EXPR, type,
12188 build_real (type, dconst1), tmp);
12189 }
12190 }
12191
12192 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12193 NaNs or Infinities. */
12194 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12195 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12196 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12197 {
12198 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12199 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12200
12201 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12202 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12203 && operand_equal_p (arg00, arg01, 0))
12204 {
12205 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12206
12207 if (cosfn != NULL_TREE)
12208 return build_call_expr_loc (loc, cosfn, 1, arg00);
12209 }
12210 }
12211
12212 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12213 NaNs or Infinities. */
12214 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12215 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12216 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12217 {
12218 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12219 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12220
12221 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12222 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12223 && operand_equal_p (arg00, arg01, 0))
12224 {
12225 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12226
12227 if (cosfn != NULL_TREE)
12228 {
12229 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12230 return fold_build2_loc (loc, RDIV_EXPR, type,
12231 build_real (type, dconst1),
12232 tmp);
12233 }
12234 }
12235 }
12236
12237 /* Optimize pow(x,c)/x as pow(x,c-1). */
12238 if (fcode0 == BUILT_IN_POW
12239 || fcode0 == BUILT_IN_POWF
12240 || fcode0 == BUILT_IN_POWL)
12241 {
12242 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12243 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12244 if (TREE_CODE (arg01) == REAL_CST
12245 && !TREE_OVERFLOW (arg01)
12246 && operand_equal_p (arg1, arg00, 0))
12247 {
12248 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12249 REAL_VALUE_TYPE c;
12250 tree arg;
12251
12252 c = TREE_REAL_CST (arg01);
12253 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12254 arg = build_real (type, c);
12255 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12256 }
12257 }
12258
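          /* Worked example (illustrative): pow (x, 3.5) / x becomes
             pow (x, 2.5).  This is only valid under
             -funsafe-math-optimizations: at x == 0 the original is
             0.0 / 0.0 == NaN while the rewrite yields 0.0.  */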
12259 /* Optimize a/root(b/c) into a*root(c/b). */
12260 if (BUILTIN_ROOT_P (fcode1))
12261 {
12262 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12263
12264 if (TREE_CODE (rootarg) == RDIV_EXPR)
12265 {
12266 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12267 tree b = TREE_OPERAND (rootarg, 0);
12268 tree c = TREE_OPERAND (rootarg, 1);
12269
12270 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12271
12272 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12273 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12274 }
12275 }
12276
12277 /* Optimize x/expN(y) into x*expN(-y). */
12278 if (BUILTIN_EXPONENT_P (fcode1))
12279 {
12280 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12281 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12282 arg1 = build_call_expr_loc (loc,
12283 expfn, 1,
12284 fold_convert_loc (loc, type, arg));
12285 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12286 }
12287
12288 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12289 if (fcode1 == BUILT_IN_POW
12290 || fcode1 == BUILT_IN_POWF
12291 || fcode1 == BUILT_IN_POWL)
12292 {
12293 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12294 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12295 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12296 tree neg11 = fold_convert_loc (loc, type,
12297 negate_expr (arg11));
12298 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12299 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12300 }
12301 }
12302 return NULL_TREE;
12303
12304 case TRUNC_DIV_EXPR:
12305 /* Optimize (X & (-A)) / A where A is a power of 2,
12306 to X >> log2(A) */
12307 if (TREE_CODE (arg0) == BIT_AND_EXPR
12308 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12309 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12310 {
12311 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12312 arg1, TREE_OPERAND (arg0, 1));
12313 if (sum && integer_zerop (sum))
{
12314 tree pow2 = build_int_cst (integer_type_node,
12315 wi::exact_log2 (arg1));
12316 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12317 TREE_OPERAND (arg0, 0), pow2);
12318 }
12319 }
12320
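      /* Worked example (illustrative): with A = 8, (X & -8) / 8 becomes
	 X >> 3; e.g. X = 29 gives (29 & -8) / 8 == 24 / 8 == 3 and
	 29 >> 3 == 3.  The AND clears exactly the bits the arithmetic
	 shift discards, so the masked division is exact and matches the
	 shift even for negative X, e.g. (-29 & -8) / 8 == -32 / 8 == -4
	 == -29 >> 3.  */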
12321 /* Fall through */
12322
12323 case FLOOR_DIV_EXPR:
12324 /* Simplify A / (B << N) where A and B are positive and B is
12325 a power of 2, to A >> (N + log2(B)). */
12326 strict_overflow_p = false;
12327 if (TREE_CODE (arg1) == LSHIFT_EXPR
12328 && (TYPE_UNSIGNED (type)
12329 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12330 {
12331 tree sval = TREE_OPERAND (arg1, 0);
12332 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12333 {
12334 tree sh_cnt = TREE_OPERAND (arg1, 1);
12335 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12336 wi::exact_log2 (sval));
12337
12338 if (strict_overflow_p)
12339 fold_overflow_warning (("assuming signed overflow does not "
12340 "occur when simplifying A / (B << N)"),
12341 WARN_STRICT_OVERFLOW_MISC);
12342
12343 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12344 sh_cnt, pow2);
12345 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12346 fold_convert_loc (loc, type, arg0), sh_cnt);
12347 }
12348 }
12349
12350 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12351 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12352 if (INTEGRAL_TYPE_P (type)
12353 && TYPE_UNSIGNED (type)
12354 && code == FLOOR_DIV_EXPR)
12355 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12356
12357 /* Fall through */
12358
12359 case ROUND_DIV_EXPR:
12360 case CEIL_DIV_EXPR:
12361 case EXACT_DIV_EXPR:
12362 if (integer_onep (arg1))
12363 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12364 if (integer_zerop (arg1))
12365 return NULL_TREE;
12366 /* X / -1 is -X. */
12367 if (!TYPE_UNSIGNED (type)
12368 && TREE_CODE (arg1) == INTEGER_CST
12369 && wi::eq_p (arg1, -1))
12370 return fold_convert_loc (loc, type, negate_expr (arg0));
12371
12372 /* Convert -A / -B to A / B when the type is signed and overflow is
12373 undefined. */
12374 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12375 && TREE_CODE (arg0) == NEGATE_EXPR
12376 && negate_expr_p (arg1))
12377 {
12378 if (INTEGRAL_TYPE_P (type))
12379 fold_overflow_warning (("assuming signed overflow does not occur "
12380 "when distributing negation across "
12381 "division"),
12382 WARN_STRICT_OVERFLOW_MISC);
12383 return fold_build2_loc (loc, code, type,
12384 fold_convert_loc (loc, type,
12385 TREE_OPERAND (arg0, 0)),
12386 fold_convert_loc (loc, type,
12387 negate_expr (arg1)));
12388 }
12389 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12390 && TREE_CODE (arg1) == NEGATE_EXPR
12391 && negate_expr_p (arg0))
12392 {
12393 if (INTEGRAL_TYPE_P (type))
12394 fold_overflow_warning (("assuming signed overflow does not occur "
12395 "when distributing negation across "
12396 "division"),
12397 WARN_STRICT_OVERFLOW_MISC);
12398 return fold_build2_loc (loc, code, type,
12399 fold_convert_loc (loc, type,
12400 negate_expr (arg0)),
12401 fold_convert_loc (loc, type,
12402 TREE_OPERAND (arg1, 0)));
12403 }
12404
12405 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12406 operation, EXACT_DIV_EXPR.
12407
12408 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12409 At one time others generated faster code; it's not clear whether they
12410 still do after the last round of changes to the DIV code in expmed.c. */
12411 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12412 && multiple_of_p (type, arg0, arg1))
12413 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12414
12415 strict_overflow_p = false;
12416 if (TREE_CODE (arg1) == INTEGER_CST
12417 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12418 &strict_overflow_p)))
12419 {
12420 if (strict_overflow_p)
12421 fold_overflow_warning (("assuming signed overflow does not occur "
12422 "when simplifying division"),
12423 WARN_STRICT_OVERFLOW_MISC);
12424 return fold_convert_loc (loc, type, tem);
12425 }
12426
12427 return NULL_TREE;
12428
12429 case CEIL_MOD_EXPR:
12430 case FLOOR_MOD_EXPR:
12431 case ROUND_MOD_EXPR:
12432 case TRUNC_MOD_EXPR:
12433 /* X % 1 is always zero, but be sure to preserve any side
12434 effects in X. */
12435 if (integer_onep (arg1))
12436 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12437
12438 /* X % 0, return X % 0 unchanged so that we can get the
12439 proper warnings and errors. */
12440 if (integer_zerop (arg1))
12441 return NULL_TREE;
12442
12443 /* 0 % X is always zero, but be sure to preserve any side
12444 effects in X. Place this after checking for X == 0. */
12445 if (integer_zerop (arg0))
12446 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12447
12448 /* X % -1 is zero. */
12449 if (!TYPE_UNSIGNED (type)
12450 && TREE_CODE (arg1) == INTEGER_CST
12451 && wi::eq_p (arg1, -1))
12452 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12453
12454 /* X % -C is the same as X % C. */
12455 if (code == TRUNC_MOD_EXPR
12456 && TYPE_SIGN (type) == SIGNED
12457 && TREE_CODE (arg1) == INTEGER_CST
12458 && !TREE_OVERFLOW (arg1)
12459 && wi::neg_p (arg1)
12460 && !TYPE_OVERFLOW_TRAPS (type)
12461 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12462 && !sign_bit_p (arg1, arg1))
12463 return fold_build2_loc (loc, code, type,
12464 fold_convert_loc (loc, type, arg0),
12465 fold_convert_loc (loc, type,
12466 negate_expr (arg1)));
12467
12468 /* X % -Y is the same as X % Y. */
12469 if (code == TRUNC_MOD_EXPR
12470 && !TYPE_UNSIGNED (type)
12471 && TREE_CODE (arg1) == NEGATE_EXPR
12472 && !TYPE_OVERFLOW_TRAPS (type))
12473 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12474 fold_convert_loc (loc, type,
12475 TREE_OPERAND (arg1, 0)));
12476
12477 strict_overflow_p = false;
12478 if (TREE_CODE (arg1) == INTEGER_CST
12479 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12480 &strict_overflow_p)))
12481 {
12482 if (strict_overflow_p)
12483 fold_overflow_warning (("assuming signed overflow does not occur "
12484 "when simplifying modulus"),
12485 WARN_STRICT_OVERFLOW_MISC);
12486 return fold_convert_loc (loc, type, tem);
12487 }
12488
12489 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12490 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12491 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12492 && (TYPE_UNSIGNED (type)
12493 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12494 {
12495 tree c = arg1;
12496 /* Also optimize A % (C << N) where C is a power of 2,
12497 to A & ((C << N) - 1). */
12498 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12499 c = TREE_OPERAND (arg1, 0);
12500
12501 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12502 {
12503 tree mask
12504 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12505 build_int_cst (TREE_TYPE (arg1), 1));
12506 if (strict_overflow_p)
12507 fold_overflow_warning (("assuming signed overflow does not "
12508 "occur when simplifying "
12509 "X % (power of two)"),
12510 WARN_STRICT_OVERFLOW_MISC);
12511 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12512 fold_convert_loc (loc, type, arg0),
12513 fold_convert_loc (loc, type, mask));
12514 }
12515 }
12516
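      /* Worked example (illustrative): unsigned X % 8 becomes X & 7, and
	 X % (2 << N) becomes X & ((2 << N) - 1); the signed case is only
	 allowed when X is provably non-negative.  */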
12517 return NULL_TREE;
12518
12519 case LROTATE_EXPR:
12520 case RROTATE_EXPR:
12521 if (integer_all_onesp (arg0))
12522 return omit_one_operand_loc (loc, type, arg0, arg1);
12523 goto shift;
12524
12525 case RSHIFT_EXPR:
12526 /* Optimize -1 >> x for arithmetic right shifts. */
12527 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12528 && tree_expr_nonnegative_p (arg1))
12529 return omit_one_operand_loc (loc, type, arg0, arg1);
12530 /* ... fall through ... */
12531
12532 case LSHIFT_EXPR:
12533 shift:
12534 if (integer_zerop (arg1))
12535 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12536 if (integer_zerop (arg0))
12537 return omit_one_operand_loc (loc, type, arg0, arg1);
12538
12539 /* Prefer vector1 << scalar to vector1 << vector2
12540 if vector2 is uniform. */
12541 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12542 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12543 return fold_build2_loc (loc, code, type, op0, tem);
12544
12545 /* Since a negative shift count is not well-defined,
12546 don't try to compute it in the compiler. */
12547 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12548 return NULL_TREE;
12549
12550 prec = element_precision (type);
12551
12552 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12553 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12554 && tree_to_uhwi (arg1) < prec
12555 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12556 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12557 {
12558 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12559 + tree_to_uhwi (arg1));
12560
12561 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12562 being well defined. */
12563 if (low >= prec)
12564 {
12565 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12566 low = low % prec;
12567 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12568 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12569 TREE_OPERAND (arg0, 0));
12570 else
12571 low = prec - 1;
12572 }
12573
12574 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12575 build_int_cst (TREE_TYPE (arg1), low));
12576 }
12577
12578 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12579 into x & ((unsigned)-1 >> c) for unsigned types. */
12580 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12581 || (TYPE_UNSIGNED (type)
12582 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12583 && tree_fits_uhwi_p (arg1)
12584 && tree_to_uhwi (arg1) < prec
12585 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12586 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12587 {
12588 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12589 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12590 tree lshift;
12591 tree arg00;
12592
12593 if (low0 == low1)
12594 {
12595 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12596
12597 lshift = build_minus_one_cst (type);
12598 lshift = const_binop (code, lshift, arg1);
12599
12600 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12601 }
12602 }
12603
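      /* Worked example (illustrative): for 32-bit unsigned X with C == 4,
	 (X >> 4) << 4 becomes X & 0xFFFFFFF0 and (X << 4) >> 4 becomes
	 X & 0x0FFFFFFF; a single AND replaces the two shifts.  */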
12604 /* Rewrite an LROTATE_EXPR by a constant into an
12605 RROTATE_EXPR by a new constant. */
12606 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12607 {
12608 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12609 tem = const_binop (MINUS_EXPR, tem, arg1);
12610 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12611 }
12612
12613 /* If we have a rotate of a bit operation with the rotate count and
12614 the second operand of the bit operation both constant,
12615 permute the two operations. */
12616 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12617 && (TREE_CODE (arg0) == BIT_AND_EXPR
12618 || TREE_CODE (arg0) == BIT_IOR_EXPR
12619 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12620 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12621 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12622 fold_build2_loc (loc, code, type,
12623 TREE_OPERAND (arg0, 0), arg1),
12624 fold_build2_loc (loc, code, type,
12625 TREE_OPERAND (arg0, 1), arg1));
12626
12627 /* Two consecutive rotates adding up to some integer
12628 multiple of the precision of the type can be ignored. */
12629 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12630 && TREE_CODE (arg0) == RROTATE_EXPR
12631 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12632 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12633 prec) == 0)
12634 return TREE_OPERAND (arg0, 0);
12635
12636 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12637 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12638 if the latter can be further optimized. */
12639 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12640 && TREE_CODE (arg0) == BIT_AND_EXPR
12641 && TREE_CODE (arg1) == INTEGER_CST
12642 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12643 {
12644 tree mask = fold_build2_loc (loc, code, type,
12645 fold_convert_loc (loc, type,
12646 TREE_OPERAND (arg0, 1)),
12647 arg1);
12648 tree shift = fold_build2_loc (loc, code, type,
12649 fold_convert_loc (loc, type,
12650 TREE_OPERAND (arg0, 0)),
12651 arg1);
12652 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12653 if (tem)
12654 return tem;
12655 }
12656
12657 return NULL_TREE;
12658
12659 case MIN_EXPR:
12660 if (operand_equal_p (arg0, arg1, 0))
12661 return omit_one_operand_loc (loc, type, arg0, arg1);
12662 if (INTEGRAL_TYPE_P (type)
12663 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12664 return omit_one_operand_loc (loc, type, arg1, arg0);
12665 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12666 if (tem)
12667 return tem;
12668 goto associate;
12669
12670 case MAX_EXPR:
12671 if (operand_equal_p (arg0, arg1, 0))
12672 return omit_one_operand_loc (loc, type, arg0, arg1);
12673 if (INTEGRAL_TYPE_P (type)
12674 && TYPE_MAX_VALUE (type)
12675 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12676 return omit_one_operand_loc (loc, type, arg1, arg0);
12677 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12678 if (tem)
12679 return tem;
12680 goto associate;
12681
12682 case TRUTH_ANDIF_EXPR:
12683 /* Note that the operands of this must be ints
12684 and their values must be 0 or 1.
12685 ("true" is a fixed value perhaps depending on the language.) */
12686 /* If first arg is constant zero, return it. */
12687 if (integer_zerop (arg0))
12688 return fold_convert_loc (loc, type, arg0);
12689 case TRUTH_AND_EXPR:
12690 /* If either arg is constant true, drop it. */
12691 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12692 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12693 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12694 /* Preserve sequence points. */
12695 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12696 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12697 /* If second arg is constant zero, result is zero, but first arg
12698 must be evaluated. */
12699 if (integer_zerop (arg1))
12700 return omit_one_operand_loc (loc, type, arg1, arg0);
12701 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12702 case will be handled here. */
12703 if (integer_zerop (arg0))
12704 return omit_one_operand_loc (loc, type, arg0, arg1);
12705
12706 /* !X && X is always false. */
12707 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12708 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12709 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12710 /* X && !X is always false. */
12711 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12712 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12713 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12714
12715 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12716 means A >= Y && A != MAX, but in this case we know that
12717 A < X <= MAX. */
12718
12719 if (!TREE_SIDE_EFFECTS (arg0)
12720 && !TREE_SIDE_EFFECTS (arg1))
12721 {
12722 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12723 if (tem && !operand_equal_p (tem, arg0, 0))
12724 return fold_build2_loc (loc, code, type, tem, arg1);
12725
12726 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12727 if (tem && !operand_equal_p (tem, arg1, 0))
12728 return fold_build2_loc (loc, code, type, arg0, tem);
12729 }
12730
12731 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12732 != NULL_TREE)
12733 return tem;
12734
12735 return NULL_TREE;
12736
12737 case TRUTH_ORIF_EXPR:
12738 /* Note that the operands of this must be ints
12739 and their values must be 0 or true.
12740 ("true" is a fixed value perhaps depending on the language.) */
12741 /* If first arg is constant true, return it. */
12742 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12743 return fold_convert_loc (loc, type, arg0);
12744 case TRUTH_OR_EXPR:
12745 /* If either arg is constant zero, drop it. */
12746 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12747 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12748 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12749 /* Preserve sequence points. */
12750 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12751 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12752 /* If second arg is constant true, result is true, but we must
12753 evaluate first arg. */
12754 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12755 return omit_one_operand_loc (loc, type, arg1, arg0);
12756 /* Likewise for first arg, but note this only occurs here for
12757 TRUTH_OR_EXPR. */
12758 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12759 return omit_one_operand_loc (loc, type, arg0, arg1);
12760
12761 /* !X || X is always true. */
12762 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12763 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12764 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12765 /* X || !X is always true. */
12766 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12767 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12768 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12769
12770 /* (X && !Y) || (!X && Y) is X ^ Y */
12771 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12772 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12773 {
12774 tree a0, a1, l0, l1, n0, n1;
12775
12776 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12777 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12778
12779 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12780 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12781
12782 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12783 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12784
12785 if ((operand_equal_p (n0, a0, 0)
12786 && operand_equal_p (n1, a1, 0))
12787 || (operand_equal_p (n0, a1, 0)
12788 && operand_equal_p (n1, a0, 0)))
12789 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12790 }
12791
12792 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12793 != NULL_TREE)
12794 return tem;
12795
12796 return NULL_TREE;
12797
12798 case TRUTH_XOR_EXPR:
12799 /* If the second arg is constant zero, drop it. */
12800 if (integer_zerop (arg1))
12801 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12802 /* If the second arg is constant true, this is a logical inversion. */
12803 if (integer_onep (arg1))
12804 {
12805 tem = invert_truthvalue_loc (loc, arg0);
12806 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12807 }
12808 /* Identical arguments cancel to zero. */
12809 if (operand_equal_p (arg0, arg1, 0))
12810 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12811
12812 /* !X ^ X is always true. */
12813 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12814 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12815 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12816
12817 /* X ^ !X is always true. */
12818 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12819 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12820 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12821
12822 return NULL_TREE;
12823
12824 case EQ_EXPR:
12825 case NE_EXPR:
12826 STRIP_NOPS (arg0);
12827 STRIP_NOPS (arg1);
12828
12829 tem = fold_comparison (loc, code, type, op0, op1);
12830 if (tem != NULL_TREE)
12831 return tem;
12832
12833 /* bool_var != 0 becomes bool_var. */
12834 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12835 && code == NE_EXPR)
12836 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12837
12838 /* bool_var == 1 becomes bool_var. */
12839 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12840 && code == EQ_EXPR)
12841 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12842
12843 /* bool_var != 1 becomes !bool_var. */
12844 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12845 && code == NE_EXPR)
12846 return fold_convert_loc (loc, type,
12847 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12848 TREE_TYPE (arg0), arg0));
12849
12850 /* bool_var == 0 becomes !bool_var. */
12851 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12852 && code == EQ_EXPR)
12853 return fold_convert_loc (loc, type,
12854 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12855 TREE_TYPE (arg0), arg0));
12856
12857 /* !exp != 0 becomes !exp */
12858 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12859 && code == NE_EXPR)
12860 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12861
12862 /* If this is an equality comparison of the address of two non-weak,
12863 unaliased symbols neither of which are extern (since we do not
12864 have access to attributes for externs), then we know the result. */
12865 if (TREE_CODE (arg0) == ADDR_EXPR
12866 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12867 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12868 && ! lookup_attribute ("alias",
12869 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12870 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12871 && TREE_CODE (arg1) == ADDR_EXPR
12872 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12873 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12874 && ! lookup_attribute ("alias",
12875 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12876 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12877 {
12878 /* We know that we're looking at the address of two
12879 non-weak, unaliased, static _DECL nodes.
12880
12881 It is both wasteful and incorrect to call operand_equal_p
12882 to compare the two ADDR_EXPR nodes. It is wasteful in that
12883 all we need to do is test pointer equality for the arguments
12884 to the two ADDR_EXPR nodes. It is incorrect to use
12885 operand_equal_p as that function is NOT equivalent to a
12886 C equality test. It can in fact return false for two
12887 objects which would test as equal using the C equality
12888 operator. */
12889 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12890 return constant_boolean_node (equal
12891 ? code == EQ_EXPR : code != EQ_EXPR,
12892 type);
12893 }
12894
12895 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12896 a MINUS_EXPR of a constant, we can convert it into a comparison with
12897 a revised constant as long as no overflow occurs. */
12898 if (TREE_CODE (arg1) == INTEGER_CST
12899 && (TREE_CODE (arg0) == PLUS_EXPR
12900 || TREE_CODE (arg0) == MINUS_EXPR)
12901 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12902 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12903 ? MINUS_EXPR : PLUS_EXPR,
12904 fold_convert_loc (loc, TREE_TYPE (arg0),
12905 arg1),
12906 TREE_OPERAND (arg0, 1)))
12907 && !TREE_OVERFLOW (tem))
12908 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12909
12910 /* Similarly for a NEGATE_EXPR. */
12911 if (TREE_CODE (arg0) == NEGATE_EXPR
12912 && TREE_CODE (arg1) == INTEGER_CST
12913 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12914 arg1)))
12915 && TREE_CODE (tem) == INTEGER_CST
12916 && !TREE_OVERFLOW (tem))
12917 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12918
12919 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12920 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12921 && TREE_CODE (arg1) == INTEGER_CST
12922 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12923 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12924 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12925 fold_convert_loc (loc,
12926 TREE_TYPE (arg0),
12927 arg1),
12928 TREE_OPERAND (arg0, 1)));
12929
12930 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12931 if ((TREE_CODE (arg0) == PLUS_EXPR
12932 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12933 || TREE_CODE (arg0) == MINUS_EXPR)
12934 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12935 0)),
12936 arg1, 0)
12937 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12938 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12939 {
12940 tree val = TREE_OPERAND (arg0, 1);
12941 return omit_two_operands_loc (loc, type,
12942 fold_build2_loc (loc, code, type,
12943 val,
12944 build_int_cst (TREE_TYPE (val),
12945 0)),
12946 TREE_OPERAND (arg0, 0), arg1);
12947 }
12948
12949 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12950 if (TREE_CODE (arg0) == MINUS_EXPR
12951 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12952 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12953 1)),
12954 arg1, 0)
12955 && wi::bit_and (TREE_OPERAND (arg0, 0), 1) == 1)
12956 {
12957 return omit_two_operands_loc (loc, type,
12958 code == NE_EXPR
12959 ? boolean_true_node : boolean_false_node,
12960 TREE_OPERAND (arg0, 1), arg1);
12961 }
12962
12963 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12964 for !=. Don't do this for ordered comparisons due to overflow. */
12965 if (TREE_CODE (arg0) == MINUS_EXPR
12966 && integer_zerop (arg1))
12967 return fold_build2_loc (loc, code, type,
12968 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12969
12970 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12971 if (TREE_CODE (arg0) == ABS_EXPR
12972 && (integer_zerop (arg1) || real_zerop (arg1)))
12973 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12974
12975 /* If this is an EQ or NE comparison with zero and ARG0 is
12976 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12977 two operations, but the latter can be done in one less insn
12978 on machines that have only two-operand insns or on which a
12979 constant cannot be the first operand. */
12980 if (TREE_CODE (arg0) == BIT_AND_EXPR
12981 && integer_zerop (arg1))
12982 {
12983 tree arg00 = TREE_OPERAND (arg0, 0);
12984 tree arg01 = TREE_OPERAND (arg0, 1);
12985 if (TREE_CODE (arg00) == LSHIFT_EXPR
12986 && integer_onep (TREE_OPERAND (arg00, 0)))
12987 {
12988 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12989 arg01, TREE_OPERAND (arg00, 1));
12990 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12991 build_int_cst (TREE_TYPE (arg0), 1));
12992 return fold_build2_loc (loc, code, type,
12993 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12994 arg1);
12995 }
12996 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12997 && integer_onep (TREE_OPERAND (arg01, 0)))
12998 {
12999 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13000 arg00, TREE_OPERAND (arg01, 1));
13001 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13002 build_int_cst (TREE_TYPE (arg0), 1));
13003 return fold_build2_loc (loc, code, type,
13004 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13005 arg1);
13006 }
13007 }
13008
13009 /* If this is an NE or EQ comparison of zero against the result of a
13010 signed MOD operation whose second operand is a power of 2, make
13011 the MOD operation unsigned since it is simpler and equivalent. */
13012 if (integer_zerop (arg1)
13013 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13014 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13015 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13016 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13017 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13018 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13019 {
13020 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13021 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13022 fold_convert_loc (loc, newtype,
13023 TREE_OPERAND (arg0, 0)),
13024 fold_convert_loc (loc, newtype,
13025 TREE_OPERAND (arg0, 1)));
13026
13027 return fold_build2_loc (loc, code, type, newmod,
13028 fold_convert_loc (loc, newtype, arg1));
13029 }
13030
13031 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13032 C1 is a valid shift constant, and C2 is a power of two, i.e.
13033 a single bit. */
13034 if (TREE_CODE (arg0) == BIT_AND_EXPR
13035 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13036 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13037 == INTEGER_CST
13038 && integer_pow2p (TREE_OPERAND (arg0, 1))
13039 && integer_zerop (arg1))
13040 {
13041 tree itype = TREE_TYPE (arg0);
13042 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13043 prec = TYPE_PRECISION (itype);
13044
13045 /* Check for a valid shift count. */
13046 if (wi::ltu_p (arg001, prec))
13047 {
13048 tree arg01 = TREE_OPERAND (arg0, 1);
13049 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13050 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13051 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13052 can be rewritten as (X & (C2 << C1)) != 0. */
13053 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13054 {
13055 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13056 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13057 return fold_build2_loc (loc, code, type, tem,
13058 fold_convert_loc (loc, itype, arg1));
13059 }
13060 /* Otherwise, for signed (arithmetic) shifts,
13061 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13062 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13063 else if (!TYPE_UNSIGNED (itype))
13064 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13065 arg000, build_int_cst (itype, 0));
13066 /* Otherwise, for unsigned (logical) shifts,
13067 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13068 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13069 else
13070 return omit_one_operand_loc (loc, type,
13071 code == EQ_EXPR ? integer_one_node
13072 : integer_zero_node,
13073 arg000);
13074 }
13075 }
13076
13077 /* If we have (A & C) == C where C is a power of 2, convert this into
13078 (A & C) != 0. Similarly for NE_EXPR. */
13079 if (TREE_CODE (arg0) == BIT_AND_EXPR
13080 && integer_pow2p (TREE_OPERAND (arg0, 1))
13081 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13082 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13083 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13084 integer_zero_node));
13085
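          /* Worked example (illustrative): (flags & 4) == 4 becomes
             (flags & 4) != 0; testing against zero is cheaper on most
             targets and needs no second copy of the constant.  */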
13086 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13087 bit, then fold the expression into A < 0 or A >= 0. */
13088 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13089 if (tem)
13090 return tem;
13091
13092 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13093 Similarly for NE_EXPR. */
13094 if (TREE_CODE (arg0) == BIT_AND_EXPR
13095 && TREE_CODE (arg1) == INTEGER_CST
13096 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13097 {
13098 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13099 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13100 TREE_OPERAND (arg0, 1));
13101 tree dandnotc
13102 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13103 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13104 notc);
13105 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13106 if (integer_nonzerop (dandnotc))
13107 return omit_one_operand_loc (loc, type, rslt, arg0);
13108 }
13109
13110 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13111 Similarly for NE_EXPR. */
13112 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13113 && TREE_CODE (arg1) == INTEGER_CST
13114 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13115 {
13116 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13117 tree candnotd
13118 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13119 TREE_OPERAND (arg0, 1),
13120 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13121 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13122 if (integer_nonzerop (candnotd))
13123 return omit_one_operand_loc (loc, type, rslt, arg0);
13124 }
13125
13126 /* If this is a comparison of a field, we may be able to simplify it. */
13127 if ((TREE_CODE (arg0) == COMPONENT_REF
13128 || TREE_CODE (arg0) == BIT_FIELD_REF)
13129 /* Handle the constant case even without -O
13130 to make sure the warnings are given. */
13131 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13132 {
13133 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13134 if (t1)
13135 return t1;
13136 }
13137
13138 /* Optimize comparisons of strlen vs zero to a compare of the
13139 first character of the string vs zero. To wit,
13140 strlen(ptr) == 0 => *ptr == 0
13141 strlen(ptr) != 0 => *ptr != 0
13142 Other cases should reduce to one of these two (or a constant)
13143 due to the return value of strlen being unsigned. */
13144 if (TREE_CODE (arg0) == CALL_EXPR
13145 && integer_zerop (arg1))
13146 {
13147 tree fndecl = get_callee_fndecl (arg0);
13148
13149 if (fndecl
13150 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13151 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13152 && call_expr_nargs (arg0) == 1
13153 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13154 {
13155 tree iref = build_fold_indirect_ref_loc (loc,
13156 CALL_EXPR_ARG (arg0, 0));
13157 return fold_build2_loc (loc, code, type, iref,
13158 build_int_cst (TREE_TYPE (iref), 0));
13159 }
13160 }
13161
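          /* Worked example (illustrative): strlen (p) == 0 becomes
             *p == 0, replacing a call to strlen and a scan of the whole
             string with a single character load.  */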
13162 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13163 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13164 if (TREE_CODE (arg0) == RSHIFT_EXPR
13165 && integer_zerop (arg1)
13166 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13167 {
13168 tree arg00 = TREE_OPERAND (arg0, 0);
13169 tree arg01 = TREE_OPERAND (arg0, 1);
13170 tree itype = TREE_TYPE (arg00);
13171 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
13172 {
13173 if (TYPE_UNSIGNED (itype))
13174 {
13175 itype = signed_type_for (itype);
13176 arg00 = fold_convert_loc (loc, itype, arg00);
13177 }
13178 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13179 type, arg00, build_zero_cst (itype));
13180 }
13181 }
13182
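          /* Worked example (illustrative): for 32-bit X, (X >> 31) != 0
             becomes X < 0 and (X >> 31) == 0 becomes X >= 0; an unsigned
             X is first converted to the corresponding signed type.  */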
13183 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13184 if (integer_zerop (arg1)
13185 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13186 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13187 TREE_OPERAND (arg0, 1));
13188
13189 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13190 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13191 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13192 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13193 build_zero_cst (TREE_TYPE (arg0)));
13194 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13195 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13196 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13197 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13198 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13199 build_zero_cst (TREE_TYPE (arg0)));
13200
13201 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13202 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13203 && TREE_CODE (arg1) == INTEGER_CST
13204 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13205 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13206 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13207 TREE_OPERAND (arg0, 1), arg1));
13208
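          /* Worked example (illustrative): (X ^ 5) == 3 becomes X == 6,
             since XORing both sides with 5 gives X == (5 ^ 3).  */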
13209 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13210 (X & C) == 0 when C is a single bit. */
13211 if (TREE_CODE (arg0) == BIT_AND_EXPR
13212 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13213 && integer_zerop (arg1)
13214 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13215 {
13216 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13217 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13218 TREE_OPERAND (arg0, 1));
13219 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13220 type, tem,
13221 fold_convert_loc (loc, TREE_TYPE (arg0),
13222 arg1));
13223 }
13224
13225 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13226 constant C is a power of two, i.e. a single bit. */
13227 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13228 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13229 && integer_zerop (arg1)
13230 && integer_pow2p (TREE_OPERAND (arg0, 1))
13231 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13232 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13233 {
13234 tree arg00 = TREE_OPERAND (arg0, 0);
13235 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13236 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13237 }
13238
13239 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13240 when C is a power of two, i.e. a single bit. */
13241 if (TREE_CODE (arg0) == BIT_AND_EXPR
13242 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13243 && integer_zerop (arg1)
13244 && integer_pow2p (TREE_OPERAND (arg0, 1))
13245 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13246 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13247 {
13248 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13249 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13250 arg000, TREE_OPERAND (arg0, 1));
13251 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13252 tem, build_int_cst (TREE_TYPE (tem), 0));
13253 }
13254
13255 if (integer_zerop (arg1)
13256 && tree_expr_nonzero_p (arg0))
13257 {
13258 tree res = constant_boolean_node (code == NE_EXPR, type);
13259 return omit_one_operand_loc (loc, type, res, arg0);
13260 }
13261
13262 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13263 if (TREE_CODE (arg0) == NEGATE_EXPR
13264 && TREE_CODE (arg1) == NEGATE_EXPR)
13265 return fold_build2_loc (loc, code, type,
13266 TREE_OPERAND (arg0, 0),
13267 fold_convert_loc (loc, TREE_TYPE (arg0),
13268 TREE_OPERAND (arg1, 0)));
13269
13270 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13271 if (TREE_CODE (arg0) == BIT_AND_EXPR
13272 && TREE_CODE (arg1) == BIT_AND_EXPR)
13273 {
13274 tree arg00 = TREE_OPERAND (arg0, 0);
13275 tree arg01 = TREE_OPERAND (arg0, 1);
13276 tree arg10 = TREE_OPERAND (arg1, 0);
13277 tree arg11 = TREE_OPERAND (arg1, 1);
13278 tree itype = TREE_TYPE (arg0);
13279
13280 if (operand_equal_p (arg01, arg11, 0))
13281 return fold_build2_loc (loc, code, type,
13282 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13283 fold_build2_loc (loc,
13284 BIT_XOR_EXPR, itype,
13285 arg00, arg10),
13286 arg01),
13287 build_zero_cst (itype));
13288
13289 if (operand_equal_p (arg01, arg10, 0))
13290 return fold_build2_loc (loc, code, type,
13291 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13292 fold_build2_loc (loc,
13293 BIT_XOR_EXPR, itype,
13294 arg00, arg11),
13295 arg01),
13296 build_zero_cst (itype));
13297
13298 if (operand_equal_p (arg00, arg11, 0))
13299 return fold_build2_loc (loc, code, type,
13300 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13301 fold_build2_loc (loc,
13302 BIT_XOR_EXPR, itype,
13303 arg01, arg10),
13304 arg00),
13305 build_zero_cst (itype));
13306
13307 if (operand_equal_p (arg00, arg10, 0))
13308 return fold_build2_loc (loc, code, type,
13309 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13310 fold_build2_loc (loc,
13311 BIT_XOR_EXPR, itype,
13312 arg01, arg11),
13313 arg00),
13314 build_zero_cst (itype));
13315 }
13316
13317 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13318 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13319 {
13320 tree arg00 = TREE_OPERAND (arg0, 0);
13321 tree arg01 = TREE_OPERAND (arg0, 1);
13322 tree arg10 = TREE_OPERAND (arg1, 0);
13323 tree arg11 = TREE_OPERAND (arg1, 1);
13324 tree itype = TREE_TYPE (arg0);
13325
13326 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13327 operand_equal_p guarantees no side-effects so we don't need
13328 to use omit_one_operand on Z. */
13329 if (operand_equal_p (arg01, arg11, 0))
13330 return fold_build2_loc (loc, code, type, arg00,
13331 fold_convert_loc (loc, TREE_TYPE (arg00),
13332 arg10));
13333 if (operand_equal_p (arg01, arg10, 0))
13334 return fold_build2_loc (loc, code, type, arg00,
13335 fold_convert_loc (loc, TREE_TYPE (arg00),
13336 arg11));
13337 if (operand_equal_p (arg00, arg11, 0))
13338 return fold_build2_loc (loc, code, type, arg01,
13339 fold_convert_loc (loc, TREE_TYPE (arg01),
13340 arg10));
13341 if (operand_equal_p (arg00, arg10, 0))
13342 return fold_build2_loc (loc, code, type, arg01,
13343 fold_convert_loc (loc, TREE_TYPE (arg01),
13344 arg11));
13345
13346 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13347 if (TREE_CODE (arg01) == INTEGER_CST
13348 && TREE_CODE (arg11) == INTEGER_CST)
13349 {
13350 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13351 fold_convert_loc (loc, itype, arg11));
13352 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13353 return fold_build2_loc (loc, code, type, tem,
13354 fold_convert_loc (loc, itype, arg10));
13355 }
13356 }
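/* E.g. (X ^ 1) == (Y ^ 2) folds to (X ^ (1 ^ 2)) == Y,
   i.e. (X ^ 3) == Y.  */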
13357
13358 /* Attempt to simplify equality/inequality comparisons of complex
13359 values. Only lower the comparison if the result is known or
13360 can be simplified to a single scalar comparison. */
13361 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13362 || TREE_CODE (arg0) == COMPLEX_CST)
13363 && (TREE_CODE (arg1) == COMPLEX_EXPR
13364 || TREE_CODE (arg1) == COMPLEX_CST))
13365 {
13366 tree real0, imag0, real1, imag1;
13367 tree rcond, icond;
13368
13369 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13370 {
13371 real0 = TREE_OPERAND (arg0, 0);
13372 imag0 = TREE_OPERAND (arg0, 1);
13373 }
13374 else
13375 {
13376 real0 = TREE_REALPART (arg0);
13377 imag0 = TREE_IMAGPART (arg0);
13378 }
13379
13380 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13381 {
13382 real1 = TREE_OPERAND (arg1, 0);
13383 imag1 = TREE_OPERAND (arg1, 1);
13384 }
13385 else
13386 {
13387 real1 = TREE_REALPART (arg1);
13388 imag1 = TREE_IMAGPART (arg1);
13389 }
13390
13391 rcond = fold_binary_loc (loc, code, type, real0, real1);
13392 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13393 {
13394 if (integer_zerop (rcond))
13395 {
13396 if (code == EQ_EXPR)
13397 return omit_two_operands_loc (loc, type, boolean_false_node,
13398 imag0, imag1);
13399 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13400 }
13401 else
13402 {
13403 if (code == NE_EXPR)
13404 return omit_two_operands_loc (loc, type, boolean_true_node,
13405 imag0, imag1);
13406 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13407 }
13408 }
13409
13410 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13411 if (icond && TREE_CODE (icond) == INTEGER_CST)
13412 {
13413 if (integer_zerop (icond))
13414 {
13415 if (code == EQ_EXPR)
13416 return omit_two_operands_loc (loc, type, boolean_false_node,
13417 real0, real1);
13418 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13419 }
13420 else
13421 {
13422 if (code == NE_EXPR)
13423 return omit_two_operands_loc (loc, type, boolean_true_node,
13424 real0, real1);
13425 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13426 }
13427 }
13428 }
13429
13430 return NULL_TREE;
13431
13432 case LT_EXPR:
13433 case GT_EXPR:
13434 case LE_EXPR:
13435 case GE_EXPR:
13436 tem = fold_comparison (loc, code, type, op0, op1);
13437 if (tem != NULL_TREE)
13438 return tem;
13439
13440 /* Transform comparisons of the form X +- C CMP X. */
13441 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13442 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13443 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13444 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13445 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13446 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13447 {
13448 tree arg01 = TREE_OPERAND (arg0, 1);
13449 enum tree_code code0 = TREE_CODE (arg0);
13450 int is_positive;
13451
13452 if (TREE_CODE (arg01) == REAL_CST)
13453 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13454 else
13455 is_positive = tree_int_cst_sgn (arg01);
13456
13457 /* (X - c) > X becomes false. */
13458 if (code == GT_EXPR
13459 && ((code0 == MINUS_EXPR && is_positive >= 0)
13460 || (code0 == PLUS_EXPR && is_positive <= 0)))
13461 {
13462 if (TREE_CODE (arg01) == INTEGER_CST
13463 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13464 fold_overflow_warning (("assuming signed overflow does not "
13465 "occur when assuming that (X - c) > X "
13466 "is always false"),
13467 WARN_STRICT_OVERFLOW_ALL);
13468 return constant_boolean_node (0, type);
13469 }
13470
13471 /* Likewise (X + c) < X becomes false. */
13472 if (code == LT_EXPR
13473 && ((code0 == PLUS_EXPR && is_positive >= 0)
13474 || (code0 == MINUS_EXPR && is_positive <= 0)))
13475 {
13476 if (TREE_CODE (arg01) == INTEGER_CST
13477 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13478 fold_overflow_warning (("assuming signed overflow does not "
13479 "occur when assuming that "
13480 "(X + c) < X is always false"),
13481 WARN_STRICT_OVERFLOW_ALL);
13482 return constant_boolean_node (0, type);
13483 }
13484
13485 /* Convert (X - c) <= X to true. */
13486 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13487 && code == LE_EXPR
13488 && ((code0 == MINUS_EXPR && is_positive >= 0)
13489 || (code0 == PLUS_EXPR && is_positive <= 0)))
13490 {
13491 if (TREE_CODE (arg01) == INTEGER_CST
13492 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13493 fold_overflow_warning (("assuming signed overflow does not "
13494 "occur when assuming that "
13495 "(X - c) <= X is always true"),
13496 WARN_STRICT_OVERFLOW_ALL);
13497 return constant_boolean_node (1, type);
13498 }
13499
13500 /* Convert (X + c) >= X to true. */
13501 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13502 && code == GE_EXPR
13503 && ((code0 == PLUS_EXPR && is_positive >= 0)
13504 || (code0 == MINUS_EXPR && is_positive <= 0)))
13505 {
13506 if (TREE_CODE (arg01) == INTEGER_CST
13507 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13508 fold_overflow_warning (("assuming signed overflow does not "
13509 "occur when assuming that "
13510 "(X + c) >= X is always true"),
13511 WARN_STRICT_OVERFLOW_ALL);
13512 return constant_boolean_node (1, type);
13513 }
13514
13515 if (TREE_CODE (arg01) == INTEGER_CST)
13516 {
13517 /* Convert X + c > X and X - c < X to true for integers. */
13518 if (code == GT_EXPR
13519 && ((code0 == PLUS_EXPR && is_positive > 0)
13520 || (code0 == MINUS_EXPR && is_positive < 0)))
13521 {
13522 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13523 fold_overflow_warning (("assuming signed overflow does "
13524 "not occur when assuming that "
13525 "(X + c) > X is always true"),
13526 WARN_STRICT_OVERFLOW_ALL);
13527 return constant_boolean_node (1, type);
13528 }
13529
13530 if (code == LT_EXPR
13531 && ((code0 == MINUS_EXPR && is_positive > 0)
13532 || (code0 == PLUS_EXPR && is_positive < 0)))
13533 {
13534 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13535 fold_overflow_warning (("assuming signed overflow does "
13536 "not occur when assuming that "
13537 "(X - c) < X is always true"),
13538 WARN_STRICT_OVERFLOW_ALL);
13539 return constant_boolean_node (1, type);
13540 }
13541
13542 /* Convert X + c <= X and X - c >= X to false for integers. */
13543 if (code == LE_EXPR
13544 && ((code0 == PLUS_EXPR && is_positive > 0)
13545 || (code0 == MINUS_EXPR && is_positive < 0)))
13546 {
13547 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13548 fold_overflow_warning (("assuming signed overflow does "
13549 "not occur when assuming that "
13550 "(X + c) <= X is always false"),
13551 WARN_STRICT_OVERFLOW_ALL);
13552 return constant_boolean_node (0, type);
13553 }
13554
13555 if (code == GE_EXPR
13556 && ((code0 == MINUS_EXPR && is_positive > 0)
13557 || (code0 == PLUS_EXPR && is_positive < 0)))
13558 {
13559 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13560 fold_overflow_warning (("assuming signed overflow does "
13561 "not occur when assuming that "
13562 "(X - c) >= X is always false"),
13563 WARN_STRICT_OVERFLOW_ALL);
13564 return constant_boolean_node (0, type);
13565 }
13566 }
13567 }
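/* The integer folds above rely on TYPE_OVERFLOW_UNDEFINED: with
   wrapping (e.g. unsigned) arithmetic, X + 1 > X is not always
   true, because X + 1 wraps to 0 when X is the maximum value.  */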
13568
13569 /* Comparisons with the highest or lowest possible integer of
13570 the specified precision will have known values. */
13571 {
13572 tree arg1_type = TREE_TYPE (arg1);
13573 unsigned int prec = TYPE_PRECISION (arg1_type);
13574
13575 if (TREE_CODE (arg1) == INTEGER_CST
13576 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13577 {
13578 wide_int max = wi::max_value (arg1_type);
13579 wide_int signed_max = wi::max_value (prec, SIGNED);
13580 wide_int min = wi::min_value (arg1_type);
13581
13582 if (wi::eq_p (arg1, max))
13583 switch (code)
13584 {
13585 case GT_EXPR:
13586 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13587
13588 case GE_EXPR:
13589 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13590
13591 case LE_EXPR:
13592 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13593
13594 case LT_EXPR:
13595 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13596
13597 /* The GE_EXPR and LT_EXPR cases above are not normally
13598 reached because of previous transformations. */
13599
13600 default:
13601 break;
13602 }
13603 else if (wi::eq_p (arg1, max - 1))
13604 switch (code)
13605 {
13606 case GT_EXPR:
13607 arg1 = const_binop (PLUS_EXPR, arg1,
13608 build_int_cst (TREE_TYPE (arg1), 1));
13609 return fold_build2_loc (loc, EQ_EXPR, type,
13610 fold_convert_loc (loc,
13611 TREE_TYPE (arg1), arg0),
13612 arg1);
13613 case LE_EXPR:
13614 arg1 = const_binop (PLUS_EXPR, arg1,
13615 build_int_cst (TREE_TYPE (arg1), 1));
13616 return fold_build2_loc (loc, NE_EXPR, type,
13617 fold_convert_loc (loc, TREE_TYPE (arg1),
13618 arg0),
13619 arg1);
13620 default:
13621 break;
13622 }
13623 else if (wi::eq_p (arg1, min))
13624 switch (code)
13625 {
13626 case LT_EXPR:
13627 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13628
13629 case LE_EXPR:
13630 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13631
13632 case GE_EXPR:
13633 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13634
13635 case GT_EXPR:
13636 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13637
13638 default:
13639 break;
13640 }
13641 else if (wi::eq_p (arg1, min + 1))
13642 switch (code)
13643 {
13644 case GE_EXPR:
13645 arg1 = const_binop (MINUS_EXPR, arg1,
13646 build_int_cst (TREE_TYPE (arg1), 1));
13647 return fold_build2_loc (loc, NE_EXPR, type,
13648 fold_convert_loc (loc,
13649 TREE_TYPE (arg1), arg0),
13650 arg1);
13651 case LT_EXPR:
13652 arg1 = const_binop (MINUS_EXPR, arg1,
13653 build_int_cst (TREE_TYPE (arg1), 1));
13654 return fold_build2_loc (loc, EQ_EXPR, type,
13655 fold_convert_loc (loc, TREE_TYPE (arg1),
13656 arg0),
13657 arg1);
13658 default:
13659 break;
13660 }
13661
13662 else if (wi::eq_p (arg1, signed_max)
13663 && TYPE_UNSIGNED (arg1_type)
13664 /* We will flip the signedness of the comparison operator
13665 associated with the mode of arg1, so the sign bit is
13666 specified by this mode. Check that arg1 is the signed
13667 max associated with this sign bit. */
13668 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13669 /* signed_type does not work on pointer types. */
13670 && INTEGRAL_TYPE_P (arg1_type))
13671 {
13672 /* The following case also applies to X < signed_max+1
13673 and X >= signed_max+1 because of previous transformations. */
13674 if (code == LE_EXPR || code == GT_EXPR)
13675 {
13676 tree st = signed_type_for (arg1_type);
13677 return fold_build2_loc (loc,
13678 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13679 type, fold_convert_loc (loc, st, arg0),
13680 build_int_cst (st, 0));
13681 }
13682 }
13683 }
13684 }
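/* E.g. for unsigned char X (maximum 255), X > 254 folds to
   X == 255 and X <= 254 folds to X != 255, while X > 127
   becomes the sign-bit test (signed char) X < 0.  */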
13685
13686 /* If we are comparing an ABS_EXPR with a constant, we can
13687 convert all the cases into explicit comparisons, but they may
13688 well not be faster than doing the ABS and one comparison.
13689 But ABS (X) <= C is a range comparison, which becomes a subtraction
13690 and a comparison, and is probably faster. */
13691 if (code == LE_EXPR
13692 && TREE_CODE (arg1) == INTEGER_CST
13693 && TREE_CODE (arg0) == ABS_EXPR
13694 && ! TREE_SIDE_EFFECTS (arg0)
13695 && (0 != (tem = negate_expr (arg1)))
13696 && TREE_CODE (tem) == INTEGER_CST
13697 && !TREE_OVERFLOW (tem))
13698 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13699 build2 (GE_EXPR, type,
13700 TREE_OPERAND (arg0, 0), tem),
13701 build2 (LE_EXPR, type,
13702 TREE_OPERAND (arg0, 0), arg1));
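/* E.g. ABS (X) <= 5 becomes X >= -5 && X <= 5.  */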
13703
13704 /* Convert ABS_EXPR<x> >= 0 to true. */
13705 strict_overflow_p = false;
13706 if (code == GE_EXPR
13707 && (integer_zerop (arg1)
13708 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13709 && real_zerop (arg1)))
13710 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13711 {
13712 if (strict_overflow_p)
13713 fold_overflow_warning (("assuming signed overflow does not occur "
13714 "when simplifying comparison of "
13715 "absolute value and zero"),
13716 WARN_STRICT_OVERFLOW_CONDITIONAL);
13717 return omit_one_operand_loc (loc, type,
13718 constant_boolean_node (true, type),
13719 arg0);
13720 }
13721
13722 /* Convert ABS_EXPR<x> < 0 to false. */
13723 strict_overflow_p = false;
13724 if (code == LT_EXPR
13725 && (integer_zerop (arg1) || real_zerop (arg1))
13726 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13727 {
13728 if (strict_overflow_p)
13729 fold_overflow_warning (("assuming signed overflow does not occur "
13730 "when simplifying comparison of "
13731 "absolute value and zero"),
13732 WARN_STRICT_OVERFLOW_CONDITIONAL);
13733 return omit_one_operand_loc (loc, type,
13734 constant_boolean_node (false, type),
13735 arg0);
13736 }
13737
13738 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13739 and similarly for >= into !=. */
13740 if ((code == LT_EXPR || code == GE_EXPR)
13741 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13742 && TREE_CODE (arg1) == LSHIFT_EXPR
13743 && integer_onep (TREE_OPERAND (arg1, 0)))
13744 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13745 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13746 TREE_OPERAND (arg1, 1)),
13747 build_zero_cst (TREE_TYPE (arg0)));
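/* E.g. for unsigned X, X < (1 << Y) holds exactly when all bits
   of X at or above position Y are zero, i.e. when X >> Y == 0.  */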
13748
13749 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13750 otherwise Y might be >= # of bits in X's type and thus e.g.
13751 (unsigned char) (1 << Y) for Y == 15 might be 0.
13752 If the cast is widening, then 1 << Y should have unsigned type,
13753 otherwise if Y is the number of bits in the signed shift type minus 1,
13754 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y == 31
13755 might be 0xffffffff80000000. */
13756 if ((code == LT_EXPR || code == GE_EXPR)
13757 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13758 && CONVERT_EXPR_P (arg1)
13759 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13760 && (TYPE_PRECISION (TREE_TYPE (arg1))
13761 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13762 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13763 || (TYPE_PRECISION (TREE_TYPE (arg1))
13764 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13765 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13766 {
13767 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13768 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13769 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13770 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13771 build_zero_cst (TREE_TYPE (arg0)));
13772 }
13773
13774 return NULL_TREE;
13775
13776 case UNORDERED_EXPR:
13777 case ORDERED_EXPR:
13778 case UNLT_EXPR:
13779 case UNLE_EXPR:
13780 case UNGT_EXPR:
13781 case UNGE_EXPR:
13782 case UNEQ_EXPR:
13783 case LTGT_EXPR:
13784 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13785 {
13786 t1 = fold_relational_const (code, type, arg0, arg1);
13787 if (t1 != NULL_TREE)
13788 return t1;
13789 }
13790
13791 /* If the first operand is NaN, the result is constant. */
13792 if (TREE_CODE (arg0) == REAL_CST
13793 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13794 && (code != LTGT_EXPR || ! flag_trapping_math))
13795 {
13796 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13797 ? integer_zero_node
13798 : integer_one_node;
13799 return omit_one_operand_loc (loc, type, t1, arg1);
13800 }
13801
13802 /* If the second operand is NaN, the result is constant. */
13803 if (TREE_CODE (arg1) == REAL_CST
13804 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13805 && (code != LTGT_EXPR || ! flag_trapping_math))
13806 {
13807 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13808 ? integer_zero_node
13809 : integer_one_node;
13810 return omit_one_operand_loc (loc, type, t1, arg0);
13811 }
13812
13813 /* Simplify unordered comparison of something with itself. */
13814 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13815 && operand_equal_p (arg0, arg1, 0))
13816 return constant_boolean_node (1, type);
13817
13818 if (code == LTGT_EXPR
13819 && !flag_trapping_math
13820 && operand_equal_p (arg0, arg1, 0))
13821 return constant_boolean_node (0, type);
13822
13823 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13824 {
13825 tree targ0 = strip_float_extensions (arg0);
13826 tree targ1 = strip_float_extensions (arg1);
13827 tree newtype = TREE_TYPE (targ0);
13828
13829 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13830 newtype = TREE_TYPE (targ1);
13831
13832 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13833 return fold_build2_loc (loc, code, type,
13834 fold_convert_loc (loc, newtype, targ0),
13835 fold_convert_loc (loc, newtype, targ1));
13836 }
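/* This is safe because the widening conversion is exact and
   preserves NaNs, so e.g. for floats f1 and f2 the comparison
   (double) f1 CMP (double) f2 is equivalent to f1 CMP f2
   performed in float.  */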
13837
13838 return NULL_TREE;
13839
13840 case COMPOUND_EXPR:
13841 /* When pedantic, a compound expression can be neither an lvalue
13842 nor an integer constant expression. */
13843 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13844 return NULL_TREE;
13845 /* Don't let (0, 0) be a null pointer constant. */
13846 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13847 : fold_convert_loc (loc, type, arg1);
13848 return pedantic_non_lvalue_loc (loc, tem);
13849
13850 case COMPLEX_EXPR:
13851 if ((TREE_CODE (arg0) == REAL_CST
13852 && TREE_CODE (arg1) == REAL_CST)
13853 || (TREE_CODE (arg0) == INTEGER_CST
13854 && TREE_CODE (arg1) == INTEGER_CST))
13855 return build_complex (type, arg0, arg1);
13856 if (TREE_CODE (arg0) == REALPART_EXPR
13857 && TREE_CODE (arg1) == IMAGPART_EXPR
13858 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13859 && operand_equal_p (TREE_OPERAND (arg0, 0),
13860 TREE_OPERAND (arg1, 0), 0))
13861 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13862 TREE_OPERAND (arg1, 0));
13863 return NULL_TREE;
13864
13865 case ASSERT_EXPR:
13866 /* An ASSERT_EXPR should never be passed to fold_binary. */
13867 gcc_unreachable ();
13868
13869 case VEC_PACK_TRUNC_EXPR:
13870 case VEC_PACK_FIX_TRUNC_EXPR:
13871 {
13872 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13873 tree *elts;
13874
13875 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13876 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13877 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13878 return NULL_TREE;
13879
13880 elts = XALLOCAVEC (tree, nelts);
13881 if (!vec_cst_ctor_to_array (arg0, elts)
13882 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13883 return NULL_TREE;
13884
13885 for (i = 0; i < nelts; i++)
13886 {
13887 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13888 ? NOP_EXPR : FIX_TRUNC_EXPR,
13889 TREE_TYPE (type), elts[i]);
13890 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13891 return NULL_TREE;
13892 }
13893
13894 return build_vector (type, elts);
13895 }
13896
13897 case VEC_WIDEN_MULT_LO_EXPR:
13898 case VEC_WIDEN_MULT_HI_EXPR:
13899 case VEC_WIDEN_MULT_EVEN_EXPR:
13900 case VEC_WIDEN_MULT_ODD_EXPR:
13901 {
13902 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13903 unsigned int out, ofs, scale;
13904 tree *elts;
13905
13906 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13907 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13908 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13909 return NULL_TREE;
13910
13911 elts = XALLOCAVEC (tree, nelts * 4);
13912 if (!vec_cst_ctor_to_array (arg0, elts)
13913 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13914 return NULL_TREE;
13915
13916 if (code == VEC_WIDEN_MULT_LO_EXPR)
13917 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13918 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13919 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13920 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13921 scale = 1, ofs = 0;
13922 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13923 scale = 1, ofs = 1;
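/* I.e. EVEN multiplies input lanes 0, 2, 4, ... pairwise and ODD
   multiplies lanes 1, 3, 5, ..., while LO/HI multiply a contiguous
   half of the input lanes, selected according to endianness.  */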
13924
13925 for (out = 0; out < nelts; out++)
13926 {
13927 unsigned int in1 = (out << scale) + ofs;
13928 unsigned int in2 = in1 + nelts * 2;
13929 tree t1, t2;
13930
13931 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13932 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13933
13934 if (t1 == NULL_TREE || t2 == NULL_TREE)
13935 return NULL_TREE;
13936 elts[out] = const_binop (MULT_EXPR, t1, t2);
13937 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13938 return NULL_TREE;
13939 }
13940
13941 return build_vector (type, elts);
13942 }
13943
13944 default:
13945 return NULL_TREE;
13946 } /* switch (code) */
13947 }
13948
13949 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13950 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13951 of GOTO_EXPR. */
13952
13953 static tree
13954 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13955 {
13956 switch (TREE_CODE (*tp))
13957 {
13958 case LABEL_EXPR:
13959 return *tp;
13960
13961 case GOTO_EXPR:
13962 *walk_subtrees = 0;
13963
13964 /* ... fall through ... */
13965
13966 default:
13967 return NULL_TREE;
13968 }
13969 }
13970
13971 /* Return whether the sub-tree ST contains a label which is accessible from
13972 outside the sub-tree. */
13973
13974 static bool
13975 contains_label_p (tree st)
13976 {
13977 return
13978 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13979 }
13980
13981 /* Fold a ternary expression of code CODE and type TYPE with operands
13982 OP0, OP1, and OP2. Return the folded expression if folding is
13983 successful. Otherwise, return NULL_TREE. */
13984
13985 tree
13986 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13987 tree op0, tree op1, tree op2)
13988 {
13989 tree tem;
13990 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13991 enum tree_code_class kind = TREE_CODE_CLASS (code);
13992
13993 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13994 && TREE_CODE_LENGTH (code) == 3);
13995
13996 /* Strip any conversions that don't change the mode. This is safe
13997 for every expression, except for a comparison expression because
13998 its signedness is derived from its operands. So, in the latter
13999 case, only strip conversions that don't change the signedness.
14000
14001 Note that this is done as an internal manipulation within the
14002 constant folder, in order to find the simplest representation of
14003 the arguments so that their form can be studied. In any case,
14004 the appropriate type conversions should be put back in the tree
14005 that will get out of the constant folder. */
14006 if (op0)
14007 {
14008 arg0 = op0;
14009 STRIP_NOPS (arg0);
14010 }
14011
14012 if (op1)
14013 {
14014 arg1 = op1;
14015 STRIP_NOPS (arg1);
14016 }
14017
14018 if (op2)
14019 {
14020 arg2 = op2;
14021 STRIP_NOPS (arg2);
14022 }
14023
14024 switch (code)
14025 {
14026 case COMPONENT_REF:
14027 if (TREE_CODE (arg0) == CONSTRUCTOR
14028 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14029 {
14030 unsigned HOST_WIDE_INT idx;
14031 tree field, value;
14032 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14033 if (field == arg1)
14034 return value;
14035 }
14036 return NULL_TREE;
14037
14038 case COND_EXPR:
14039 case VEC_COND_EXPR:
14040 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14041 so all simple results must be passed through pedantic_non_lvalue. */
14042 if (TREE_CODE (arg0) == INTEGER_CST)
14043 {
14044 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14045 tem = integer_zerop (arg0) ? op2 : op1;
14046 /* Only optimize constant conditions when the selected branch
14047 has the same type as the COND_EXPR. This avoids optimizing
14048 away "c ? x : throw", where the throw has a void type.
14049 Avoid throwing away an operand that contains a label. */
14050 if ((!TREE_SIDE_EFFECTS (unused_op)
14051 || !contains_label_p (unused_op))
14052 && (! VOID_TYPE_P (TREE_TYPE (tem))
14053 || VOID_TYPE_P (type)))
14054 return pedantic_non_lvalue_loc (loc, tem);
14055 return NULL_TREE;
14056 }
14057 else if (TREE_CODE (arg0) == VECTOR_CST)
14058 {
14059 if (integer_all_onesp (arg0))
14060 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14061 if (integer_zerop (arg0))
14062 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14063
14064 if ((TREE_CODE (arg1) == VECTOR_CST
14065 || TREE_CODE (arg1) == CONSTRUCTOR)
14066 && (TREE_CODE (arg2) == VECTOR_CST
14067 || TREE_CODE (arg2) == CONSTRUCTOR))
14068 {
14069 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14070 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14071 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14072 for (i = 0; i < nelts; i++)
14073 {
14074 tree val = VECTOR_CST_ELT (arg0, i);
14075 if (integer_all_onesp (val))
14076 sel[i] = i;
14077 else if (integer_zerop (val))
14078 sel[i] = nelts + i;
14079 else /* Currently unreachable. */
14080 return NULL_TREE;
14081 }
14082 tree t = fold_vec_perm (type, arg1, arg2, sel);
14083 if (t != NULL_TREE)
14084 return t;
14085 }
14086 }
14087
14088 if (operand_equal_p (arg1, op2, 0))
14089 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14090
14091 /* If we have A op B ? A : C, we may be able to convert this to a
14092 simpler expression, depending on the operation and the values
14093 of B and C. Signed zeros prevent all of these transformations,
14094 for reasons given above each one.
14095
14096 Also try swapping the arguments and inverting the conditional. */
14097 if (COMPARISON_CLASS_P (arg0)
14098 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14099 arg1, TREE_OPERAND (arg0, 1))
14100 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14101 {
14102 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14103 if (tem)
14104 return tem;
14105 }
14106
14107 if (COMPARISON_CLASS_P (arg0)
14108 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14109 op2,
14110 TREE_OPERAND (arg0, 1))
14111 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14112 {
14113 location_t loc0 = expr_location_or (arg0, loc);
14114 tem = fold_invert_truthvalue (loc0, arg0);
14115 if (tem && COMPARISON_CLASS_P (tem))
14116 {
14117 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14118 if (tem)
14119 return tem;
14120 }
14121 }
14122
14123 /* If the second operand is simpler than the third, swap them
14124 since that produces better jump optimization results. */
14125 if (truth_value_p (TREE_CODE (arg0))
14126 && tree_swap_operands_p (op1, op2, false))
14127 {
14128 location_t loc0 = expr_location_or (arg0, loc);
14129 /* See if this can be inverted. If it can't, possibly because
14130 it was a floating-point inequality comparison, don't do
14131 anything. */
14132 tem = fold_invert_truthvalue (loc0, arg0);
14133 if (tem)
14134 return fold_build3_loc (loc, code, type, tem, op2, op1);
14135 }
14136
14137 /* Convert A ? 1 : 0 to simply A. */
14138 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14139 : (integer_onep (op1)
14140 && !VECTOR_TYPE_P (type)))
14141 && integer_zerop (op2)
14142 /* If we try to convert OP0 to our type, the
14143 call to fold will try to move the conversion inside
14144 a COND, which will recurse. In that case, the COND_EXPR
14145 is probably the best choice, so leave it alone. */
14146 && type == TREE_TYPE (arg0))
14147 return pedantic_non_lvalue_loc (loc, arg0);
14148
14149 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14150 over COND_EXPR in cases such as floating point comparisons. */
14151 if (integer_zerop (op1)
14152 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14153 : (integer_onep (op2)
14154 && !VECTOR_TYPE_P (type)))
14155 && truth_value_p (TREE_CODE (arg0)))
14156 return pedantic_non_lvalue_loc (loc,
14157 fold_convert_loc (loc, type,
14158 invert_truthvalue_loc (loc,
14159 arg0)));
14160
14161 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14162 if (TREE_CODE (arg0) == LT_EXPR
14163 && integer_zerop (TREE_OPERAND (arg0, 1))
14164 && integer_zerop (op2)
14165 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14166 {
14167 /* sign_bit_p looks through both zero and sign extensions,
14168 but for this optimization only sign extensions are
14169 usable. */
14170 tree tem2 = TREE_OPERAND (arg0, 0);
14171 while (tem != tem2)
14172 {
14173 if (TREE_CODE (tem2) != NOP_EXPR
14174 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14175 {
14176 tem = NULL_TREE;
14177 break;
14178 }
14179 tem2 = TREE_OPERAND (tem2, 0);
14180 }
14181 /* sign_bit_p only checks ARG1 bits within A's precision.
14182 If <sign bit of A> has wider type than A, bits outside
14183 of A's precision in <sign bit of A> need to be checked.
14184 If they are all 0, this optimization needs to be done
14185 in unsigned A's type; if they are all 1, in signed A's type;
14186 otherwise this can't be done. */
14187 if (tem
14188 && TYPE_PRECISION (TREE_TYPE (tem))
14189 < TYPE_PRECISION (TREE_TYPE (arg1))
14190 && TYPE_PRECISION (TREE_TYPE (tem))
14191 < TYPE_PRECISION (type))
14192 {
14193 int inner_width, outer_width;
14194 tree tem_type;
14195
14196 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14197 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14198 if (outer_width > TYPE_PRECISION (type))
14199 outer_width = TYPE_PRECISION (type);
14200
14201 wide_int mask = wi::shifted_mask
14202 (inner_width, outer_width - inner_width, false,
14203 TYPE_PRECISION (TREE_TYPE (arg1)));
14204
14205 wide_int common = mask & arg1;
14206 if (common == mask)
14207 {
14208 tem_type = signed_type_for (TREE_TYPE (tem));
14209 tem = fold_convert_loc (loc, tem_type, tem);
14210 }
14211 else if (common == 0)
14212 {
14213 tem_type = unsigned_type_for (TREE_TYPE (tem));
14214 tem = fold_convert_loc (loc, tem_type, tem);
14215 }
14216 else
14217 tem = NULL;
14218 }
14219
14220 if (tem)
14221 return
14222 fold_convert_loc (loc, type,
14223 fold_build2_loc (loc, BIT_AND_EXPR,
14224 TREE_TYPE (tem), tem,
14225 fold_convert_loc (loc,
14226 TREE_TYPE (tem),
14227 arg1)));
14228 }
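/* E.g. for a 32-bit int X, X < 0 ? 0x80000000 : 0 is simply
   X & 0x80000000, selecting the sign bit directly.  */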
14229
14230 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14231 already handled above. */
14232 if (TREE_CODE (arg0) == BIT_AND_EXPR
14233 && integer_onep (TREE_OPERAND (arg0, 1))
14234 && integer_zerop (op2)
14235 && integer_pow2p (arg1))
14236 {
14237 tree tem = TREE_OPERAND (arg0, 0);
14238 STRIP_NOPS (tem);
14239 if (TREE_CODE (tem) == RSHIFT_EXPR
14240 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14241 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
14242 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
14243 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14244 TREE_OPERAND (tem, 0), arg1);
14245 }
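/* E.g. (X >> 3) & 1 ? 8 : 0 tests bit 3 of X, so it is simply
   X & 8.  */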
14246
14247 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14248 is probably obsolete because the first operand should be a
14249 truth value (that's why we have the two cases above), but let's
14250 leave it in until we can confirm this for all front-ends. */
14251 if (integer_zerop (op2)
14252 && TREE_CODE (arg0) == NE_EXPR
14253 && integer_zerop (TREE_OPERAND (arg0, 1))
14254 && integer_pow2p (arg1)
14255 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14256 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14257 arg1, OEP_ONLY_CONST))
14258 return pedantic_non_lvalue_loc (loc,
14259 fold_convert_loc (loc, type,
14260 TREE_OPERAND (arg0, 0)));
14261
14262 /* Disable the transformations below for vectors, since
14263 fold_binary_op_with_conditional_arg may undo them immediately,
14264 yielding an infinite loop. */
14265 if (code == VEC_COND_EXPR)
14266 return NULL_TREE;
14267
14268 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14269 if (integer_zerop (op2)
14270 && truth_value_p (TREE_CODE (arg0))
14271 && truth_value_p (TREE_CODE (arg1))
14272 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14273 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14274 : TRUTH_ANDIF_EXPR,
14275 type, fold_convert_loc (loc, type, arg0), arg1);
14276
14277 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14278 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14279 && truth_value_p (TREE_CODE (arg0))
14280 && truth_value_p (TREE_CODE (arg1))
14281 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14282 {
14283 location_t loc0 = expr_location_or (arg0, loc);
14284 /* Only perform transformation if ARG0 is easily inverted. */
14285 tem = fold_invert_truthvalue (loc0, arg0);
14286 if (tem)
14287 return fold_build2_loc (loc, code == VEC_COND_EXPR
14288 ? BIT_IOR_EXPR
14289 : TRUTH_ORIF_EXPR,
14290 type, fold_convert_loc (loc, type, tem),
14291 arg1);
14292 }
14293
14294 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14295 if (integer_zerop (arg1)
14296 && truth_value_p (TREE_CODE (arg0))
14297 && truth_value_p (TREE_CODE (op2))
14298 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14299 {
14300 location_t loc0 = expr_location_or (arg0, loc);
14301 /* Only perform transformation if ARG0 is easily inverted. */
14302 tem = fold_invert_truthvalue (loc0, arg0);
14303 if (tem)
14304 return fold_build2_loc (loc, code == VEC_COND_EXPR
14305 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14306 type, fold_convert_loc (loc, type, tem),
14307 op2);
14308 }
14309
14310 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14311 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14312 && truth_value_p (TREE_CODE (arg0))
14313 && truth_value_p (TREE_CODE (op2))
14314 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14315 return fold_build2_loc (loc, code == VEC_COND_EXPR
14316 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14317 type, fold_convert_loc (loc, type, arg0), op2);
14318
14319 return NULL_TREE;
14320
14321 case CALL_EXPR:
14322 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14323 of fold_ternary on them. */
14324 gcc_unreachable ();
14325
14326 case BIT_FIELD_REF:
14327 if ((TREE_CODE (arg0) == VECTOR_CST
14328 || (TREE_CODE (arg0) == CONSTRUCTOR
14329 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14330 && (type == TREE_TYPE (TREE_TYPE (arg0))
14331 || (TREE_CODE (type) == VECTOR_TYPE
14332 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14333 {
14334 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14335 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14336 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14337 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14338
14339 if (n != 0
14340 && (idx % width) == 0
14341 && (n % width) == 0
14342 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14343 {
14344 idx = idx / width;
14345 n = n / width;
14346
14347 if (TREE_CODE (arg0) == VECTOR_CST)
14348 {
14349 if (n == 1)
14350 return VECTOR_CST_ELT (arg0, idx);
14351
14352 tree *vals = XALLOCAVEC (tree, n);
14353 for (unsigned i = 0; i < n; ++i)
14354 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14355 return build_vector (type, vals);
14356 }
14357
14358 /* Constructor elements can be subvectors. */
14359 unsigned HOST_WIDE_INT k = 1;
14360 if (CONSTRUCTOR_NELTS (arg0) != 0)
14361 {
14362 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14363 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14364 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14365 }
14366
14367 /* We keep an exact subset of the constructor elements. */
14368 if ((idx % k) == 0 && (n % k) == 0)
14369 {
14370 if (CONSTRUCTOR_NELTS (arg0) == 0)
14371 return build_constructor (type, NULL);
14372 idx /= k;
14373 n /= k;
14374 if (n == 1)
14375 {
14376 if (idx < CONSTRUCTOR_NELTS (arg0))
14377 return CONSTRUCTOR_ELT (arg0, idx)->value;
14378 return build_zero_cst (type);
14379 }
14380
14381 vec<constructor_elt, va_gc> *vals;
14382 vec_alloc (vals, n);
14383 for (unsigned i = 0;
14384 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14385 ++i)
14386 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14387 CONSTRUCTOR_ELT
14388 (arg0, idx + i)->value);
14389 return build_constructor (type, vals);
14390 }
14391 /* The bitfield references a single constructor element. */
14392 else if (idx + n <= (idx / k + 1) * k)
14393 {
14394 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14395 return build_zero_cst (type);
14396 else if (n == k)
14397 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14398 else
14399 return fold_build3_loc (loc, code, type,
14400 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14401 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14402 }
14403 }
14404 }
14405
14406 /* A bit-field-ref that references the full argument can be stripped. */
14407 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14408 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14409 && integer_zerop (op2))
14410 return fold_convert_loc (loc, type, arg0);
14411
14412 /* On constants we can use native encode/interpret to constant
14413 fold (nearly) all BIT_FIELD_REFs. */
14414 if (CONSTANT_CLASS_P (arg0)
14415 && can_native_interpret_type_p (type)
14416 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14417 /* This limitation should not be necessary; we just need to
14418 round this up to the mode size. */
14419 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14420 /* Need bit-shifting of the buffer to relax the following. */
14421 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14422 {
14423 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14424 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14425 unsigned HOST_WIDE_INT clen;
14426 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14427 /* ??? We cannot tell native_encode_expr to start at
14428 some random byte only. So limit ourselves to a reasonable
14429 amount of work. */
14430 if (clen <= 4096)
14431 {
14432 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14433 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14434 if (len > 0
14435 && len * BITS_PER_UNIT >= bitpos + bitsize)
14436 {
14437 tree v = native_interpret_expr (type,
14438 b + bitpos / BITS_PER_UNIT,
14439 bitsize / BITS_PER_UNIT);
14440 if (v)
14441 return v;
14442 }
14443 }
14444 }
14445
14446 return NULL_TREE;
14447
14448 case FMA_EXPR:
14449 /* For integers we can decompose the FMA if possible. */
14450 if (TREE_CODE (arg0) == INTEGER_CST
14451 && TREE_CODE (arg1) == INTEGER_CST)
14452 return fold_build2_loc (loc, PLUS_EXPR, type,
14453 const_binop (MULT_EXPR, arg0, arg1), arg2);
14454 if (integer_zerop (arg2))
14455 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14456
14457 return fold_fma (loc, type, arg0, arg1, arg2);
14458
14459 case VEC_PERM_EXPR:
14460 if (TREE_CODE (arg2) == VECTOR_CST)
14461 {
14462 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14463 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14464 bool need_mask_canon = false;
14465 bool all_in_vec0 = true;
14466 bool all_in_vec1 = true;
14467 bool maybe_identity = true;
14468 bool single_arg = (op0 == op1);
14469 bool changed = false;
14470
14471 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
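/* With two distinct operands, selector values 0 .. nelts-1 pick
   lanes of op0 and nelts .. 2*nelts-1 pick lanes of op1;
   out-of-range values are reduced by masking and the selector
   vector is canonicalized below.  */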
14472 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14473 for (i = 0; i < nelts; i++)
14474 {
14475 tree val = VECTOR_CST_ELT (arg2, i);
14476 if (TREE_CODE (val) != INTEGER_CST)
14477 return NULL_TREE;
14478
14479 /* Make sure that the perm value is in an acceptable
14480 range. */
14481 wide_int t = val;
14482 if (wi::gtu_p (t, mask))
14483 {
14484 need_mask_canon = true;
14485 sel[i] = t.to_uhwi () & mask;
14486 }
14487 else
14488 sel[i] = t.to_uhwi ();
14489
14490 if (sel[i] < nelts)
14491 all_in_vec1 = false;
14492 else
14493 all_in_vec0 = false;
14494
14495 if ((sel[i] & (nelts-1)) != i)
14496 maybe_identity = false;
14497 }
14498
14499 if (maybe_identity)
14500 {
14501 if (all_in_vec0)
14502 return op0;
14503 if (all_in_vec1)
14504 return op1;
14505 }
14506
14507 if (all_in_vec0)
14508 op1 = op0;
14509 else if (all_in_vec1)
14510 {
14511 op0 = op1;
14512 for (i = 0; i < nelts; i++)
14513 sel[i] -= nelts;
14514 need_mask_canon = true;
14515 }
14516
14517 if ((TREE_CODE (op0) == VECTOR_CST
14518 || TREE_CODE (op0) == CONSTRUCTOR)
14519 && (TREE_CODE (op1) == VECTOR_CST
14520 || TREE_CODE (op1) == CONSTRUCTOR))
14521 {
14522 tree t = fold_vec_perm (type, op0, op1, sel);
14523 if (t != NULL_TREE)
14524 return t;
14525 }
14526
14527 if (op0 == op1 && !single_arg)
14528 changed = true;
14529
14530 if (need_mask_canon && arg2 == op2)
14531 {
14532 tree *tsel = XALLOCAVEC (tree, nelts);
14533 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14534 for (i = 0; i < nelts; i++)
14535 tsel[i] = build_int_cst (eltype, sel[i]);
14536 op2 = build_vector (TREE_TYPE (arg2), tsel);
14537 changed = true;
14538 }
14539
14540 if (changed)
14541 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14542 }
14543 return NULL_TREE;
14544
14545 default:
14546 return NULL_TREE;
14547 } /* switch (code) */
14548 }
14549
14550 /* Perform constant folding and related simplification of EXPR.
14551 The related simplifications include x*1 => x, x*0 => 0, etc.,
14552 and application of the associative law.
14553 NOP_EXPR conversions may be removed freely (as long as we
14554 are careful not to change the type of the overall expression).
14555 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14556 but we can constant-fold them if they have constant operands. */
14557
14558 #ifdef ENABLE_FOLD_CHECKING
14559 # define fold(x) fold_1 (x)
14560 static tree fold_1 (tree);
14561 static
14562 #endif
14563 tree
14564 fold (tree expr)
14565 {
14566 const tree t = expr;
14567 enum tree_code code = TREE_CODE (t);
14568 enum tree_code_class kind = TREE_CODE_CLASS (code);
14569 tree tem;
14570 location_t loc = EXPR_LOCATION (expr);
14571
14572 /* Return right away if a constant. */
14573 if (kind == tcc_constant)
14574 return t;
14575
14576 /* CALL_EXPR-like objects with variable numbers of operands are
14577 treated specially. */
14578 if (kind == tcc_vl_exp)
14579 {
14580 if (code == CALL_EXPR)
14581 {
14582 tem = fold_call_expr (loc, expr, false);
14583 return tem ? tem : expr;
14584 }
14585 return expr;
14586 }
14587
14588 if (IS_EXPR_CODE_CLASS (kind))
14589 {
14590 tree type = TREE_TYPE (t);
14591 tree op0, op1, op2;
14592
14593 switch (TREE_CODE_LENGTH (code))
14594 {
14595 case 1:
14596 op0 = TREE_OPERAND (t, 0);
14597 tem = fold_unary_loc (loc, code, type, op0);
14598 return tem ? tem : expr;
14599 case 2:
14600 op0 = TREE_OPERAND (t, 0);
14601 op1 = TREE_OPERAND (t, 1);
14602 tem = fold_binary_loc (loc, code, type, op0, op1);
14603 return tem ? tem : expr;
14604 case 3:
14605 op0 = TREE_OPERAND (t, 0);
14606 op1 = TREE_OPERAND (t, 1);
14607 op2 = TREE_OPERAND (t, 2);
14608 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14609 return tem ? tem : expr;
14610 default:
14611 break;
14612 }
14613 }
14614
14615 switch (code)
14616 {
14617 case ARRAY_REF:
14618 {
14619 tree op0 = TREE_OPERAND (t, 0);
14620 tree op1 = TREE_OPERAND (t, 1);
14621
14622 if (TREE_CODE (op1) == INTEGER_CST
14623 && TREE_CODE (op0) == CONSTRUCTOR
14624 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14625 {
14626 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14627 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14628 unsigned HOST_WIDE_INT begin = 0;
14629
14630 /* Find a matching index by means of a binary search. */
14631 while (begin != end)
14632 {
14633 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14634 tree index = (*elts)[middle].index;
14635
14636 if (TREE_CODE (index) == INTEGER_CST
14637 && tree_int_cst_lt (index, op1))
14638 begin = middle + 1;
14639 else if (TREE_CODE (index) == INTEGER_CST
14640 && tree_int_cst_lt (op1, index))
14641 end = middle;
14642 else if (TREE_CODE (index) == RANGE_EXPR
14643 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14644 begin = middle + 1;
14645 else if (TREE_CODE (index) == RANGE_EXPR
14646 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14647 end = middle;
14648 else
14649 return (*elts)[middle].value;
14650 }
14651 }
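/* The binary search above assumes the constructor elements are
   sorted by (possibly RANGE_EXPR) index, so e.g. looking up
   index 2 in { [0] = a, [1] = b, [2] = c } returns c.  */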
14652
14653 return t;
14654 }
14655
14656 /* Return a VECTOR_CST if possible. */
14657 case CONSTRUCTOR:
14658 {
14659 tree type = TREE_TYPE (t);
14660 if (TREE_CODE (type) != VECTOR_TYPE)
14661 return t;
14662
14663 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14664 unsigned HOST_WIDE_INT idx, pos = 0;
14665 tree value;
14666
14667 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14668 {
14669 if (!CONSTANT_CLASS_P (value))
14670 return t;
14671 if (TREE_CODE (value) == VECTOR_CST)
14672 {
14673 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14674 vec[pos++] = VECTOR_CST_ELT (value, i);
14675 }
14676 else
14677 vec[pos++] = value;
14678 }
14679 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14680 vec[pos] = build_zero_cst (TREE_TYPE (type));
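/* E.g. a constructor { 1, 2 } for a four-element integer vector
   type yields the VECTOR_CST { 1, 2, 0, 0 }, with the missing
   trailing lanes zero-filled.  */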
14681
14682 return build_vector (type, vec);
14683 }
14684
14685 case CONST_DECL:
14686 return fold (DECL_INITIAL (t));
14687
14688 default:
14689 return t;
14690 } /* switch (code) */
14691 }
14692
14693 #ifdef ENABLE_FOLD_CHECKING
14694 #undef fold
14695
14696 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14697 hash_table <pointer_hash <tree_node> >);
14698 static void fold_check_failed (const_tree, const_tree);
14699 void print_fold_checksum (const_tree);
14700
14701 /* When --enable-checking=fold, compute a digest of expr before
14702 and after the actual fold call to verify that fold did not
14703 accidentally change the original expr. */
14704
14705 tree
14706 fold (tree expr)
14707 {
14708 tree ret;
14709 struct md5_ctx ctx;
14710 unsigned char checksum_before[16], checksum_after[16];
14711 hash_table <pointer_hash <tree_node> > ht;
14712
14713 ht.create (32);
14714 md5_init_ctx (&ctx);
14715 fold_checksum_tree (expr, &ctx, ht);
14716 md5_finish_ctx (&ctx, checksum_before);
14717 ht.empty ();
14718
14719 ret = fold_1 (expr);
14720
14721 md5_init_ctx (&ctx);
14722 fold_checksum_tree (expr, &ctx, ht);
14723 md5_finish_ctx (&ctx, checksum_after);
14724 ht.dispose ();
14725
14726 if (memcmp (checksum_before, checksum_after, 16))
14727 fold_check_failed (expr, ret);
14728
14729 return ret;
14730 }
14731
14732 void
14733 print_fold_checksum (const_tree expr)
14734 {
14735 struct md5_ctx ctx;
14736 unsigned char checksum[16], cnt;
14737 hash_table <pointer_hash <tree_node> > ht;
14738
14739 ht.create (32);
14740 md5_init_ctx (&ctx);
14741 fold_checksum_tree (expr, &ctx, ht);
14742 md5_finish_ctx (&ctx, checksum);
14743 ht.dispose ();
14744 for (cnt = 0; cnt < 16; ++cnt)
14745 fprintf (stderr, "%02x", checksum[cnt]);
14746 putc ('\n', stderr);
14747 }
14748
14749 static void
14750 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14751 {
14752 internal_error ("fold check: original tree changed by fold");
14753 }
14754
14755 static void
14756 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14757 hash_table <pointer_hash <tree_node> > ht)
14758 {
14759 tree_node **slot;
14760 enum tree_code code;
14761 union tree_node buf;
14762 int i, len;
14763
14764 recursive_label:
14765 if (expr == NULL)
14766 return;
14767 slot = ht.find_slot (expr, INSERT);
14768 if (*slot != NULL)
14769 return;
14770 *slot = CONST_CAST_TREE (expr);
14771 code = TREE_CODE (expr);
14772 if (TREE_CODE_CLASS (code) == tcc_declaration
14773 && DECL_ASSEMBLER_NAME_SET_P (expr))
14774 {
14775 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14776 memcpy ((char *) &buf, expr, tree_size (expr));
14777 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14778 expr = (tree) &buf;
14779 }
14780 else if (TREE_CODE_CLASS (code) == tcc_type
14781 && (TYPE_POINTER_TO (expr)
14782 || TYPE_REFERENCE_TO (expr)
14783 || TYPE_CACHED_VALUES_P (expr)
14784 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14785 || TYPE_NEXT_VARIANT (expr)))
14786 {
14787 /* Allow these fields to be modified. */
14788 tree tmp;
14789 memcpy ((char *) &buf, expr, tree_size (expr));
14790 expr = tmp = (tree) &buf;
14791 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14792 TYPE_POINTER_TO (tmp) = NULL;
14793 TYPE_REFERENCE_TO (tmp) = NULL;
14794 TYPE_NEXT_VARIANT (tmp) = NULL;
14795 if (TYPE_CACHED_VALUES_P (tmp))
14796 {
14797 TYPE_CACHED_VALUES_P (tmp) = 0;
14798 TYPE_CACHED_VALUES (tmp) = NULL;
14799 }
14800 }
14801 md5_process_bytes (expr, tree_size (expr), ctx);
14802 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14803 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14804 if (TREE_CODE_CLASS (code) != tcc_type
14805 && TREE_CODE_CLASS (code) != tcc_declaration
14806 && code != TREE_LIST
14807 && code != SSA_NAME
14808 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14809 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14810 switch (TREE_CODE_CLASS (code))
14811 {
14812 case tcc_constant:
14813 switch (code)
14814 {
14815 case STRING_CST:
14816 md5_process_bytes (TREE_STRING_POINTER (expr),
14817 TREE_STRING_LENGTH (expr), ctx);
14818 break;
14819 case COMPLEX_CST:
14820 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14821 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14822 break;
14823 case VECTOR_CST:
14824 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14825 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14826 break;
14827 default:
14828 break;
14829 }
14830 break;
14831 case tcc_exceptional:
14832 switch (code)
14833 {
14834 case TREE_LIST:
14835 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14836 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14837 expr = TREE_CHAIN (expr);
14838 goto recursive_label;
14839 break;
14840 case TREE_VEC:
14841 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14842 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14843 break;
14844 default:
14845 break;
14846 }
14847 break;
14848 case tcc_expression:
14849 case tcc_reference:
14850 case tcc_comparison:
14851 case tcc_unary:
14852 case tcc_binary:
14853 case tcc_statement:
14854 case tcc_vl_exp:
14855 len = TREE_OPERAND_LENGTH (expr);
14856 for (i = 0; i < len; ++i)
14857 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14858 break;
14859 case tcc_declaration:
14860 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14861 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14862 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14863 {
14864 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14865 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14866 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14867 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14868 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14869 }
14870 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14871 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14872
14873 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14874 {
14875 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14876 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14877 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14878 }
14879 break;
14880 case tcc_type:
14881 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14882 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14883 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14884 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14885 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14886 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14887 if (INTEGRAL_TYPE_P (expr)
14888 || SCALAR_FLOAT_TYPE_P (expr))
14889 {
14890 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14891 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14892 }
14893 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14894 if (TREE_CODE (expr) == RECORD_TYPE
14895 || TREE_CODE (expr) == UNION_TYPE
14896 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14897 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14898 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14899 break;
14900 default:
14901 break;
14902 }
14903 }
14904
14905 /* Helper function for outputting the checksum of a tree T. When
14906 debugging with gdb, you can "define mynext" to be "next" followed
14907 by "call debug_fold_checksum (op0)", then just trace down till the
14908 outputs differ. */
14909
14910 DEBUG_FUNCTION void
14911 debug_fold_checksum (const_tree t)
14912 {
14913 int i;
14914 unsigned char checksum[16];
14915 struct md5_ctx ctx;
14916 hash_table <pointer_hash <tree_node> > ht;
14917 ht.create (32);
14918
14919 md5_init_ctx (&ctx);
14920 fold_checksum_tree (t, &ctx, ht);
14921 md5_finish_ctx (&ctx, checksum);
14922 ht.empty ();
14923
14924 for (i = 0; i < 16; i++)
14925 fprintf (stderr, "%d ", checksum[i]);
14926
14927 fprintf (stderr, "\n");
14928 }
14929
14930 #endif
14931
14932 /* Fold a unary tree expression with code CODE of type TYPE with an
14933 operand OP0. LOC is the location of the resulting expression.
14934 Return a folded expression if successful. Otherwise, return a tree
14935 expression with code CODE of type TYPE with an operand OP0. */
14936
14937 tree
14938 fold_build1_stat_loc (location_t loc,
14939 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14940 {
14941 tree tem;
14942 #ifdef ENABLE_FOLD_CHECKING
14943 unsigned char checksum_before[16], checksum_after[16];
14944 struct md5_ctx ctx;
14945 hash_table <pointer_hash <tree_node> > ht;
14946
14947 ht.create (32);
14948 md5_init_ctx (&ctx);
14949 fold_checksum_tree (op0, &ctx, ht);
14950 md5_finish_ctx (&ctx, checksum_before);
14951 ht.empty ();
14952 #endif
14953
14954 tem = fold_unary_loc (loc, code, type, op0);
14955 if (!tem)
14956 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14957
14958 #ifdef ENABLE_FOLD_CHECKING
14959 md5_init_ctx (&ctx);
14960 fold_checksum_tree (op0, &ctx, ht);
14961 md5_finish_ctx (&ctx, checksum_after);
14962 ht.dispose ();
14963
14964 if (memcmp (checksum_before, checksum_after, 16))
14965 fold_check_failed (op0, tem);
14966 #endif
14967 return tem;
14968 }
14969
14970 /* Fold a binary tree expression with code CODE of type TYPE with
14971 operands OP0 and OP1. LOC is the location of the resulting
14972 expression. Return a folded expression if successful. Otherwise,
14973 return a tree expression with code CODE of type TYPE with operands
14974 OP0 and OP1. */
14975
14976 tree
14977 fold_build2_stat_loc (location_t loc,
14978 enum tree_code code, tree type, tree op0, tree op1
14979 MEM_STAT_DECL)
14980 {
14981 tree tem;
14982 #ifdef ENABLE_FOLD_CHECKING
14983 unsigned char checksum_before_op0[16],
14984 checksum_before_op1[16],
14985 checksum_after_op0[16],
14986 checksum_after_op1[16];
14987 struct md5_ctx ctx;
14988 hash_table <pointer_hash <tree_node> > ht;
14989
14990 ht.create (32);
14991 md5_init_ctx (&ctx);
14992 fold_checksum_tree (op0, &ctx, ht);
14993 md5_finish_ctx (&ctx, checksum_before_op0);
14994 ht.empty ();
14995
14996 md5_init_ctx (&ctx);
14997 fold_checksum_tree (op1, &ctx, ht);
14998 md5_finish_ctx (&ctx, checksum_before_op1);
14999 ht.empty ();
15000 #endif
15001
15002 tem = fold_binary_loc (loc, code, type, op0, op1);
15003 if (!tem)
15004 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15005
15006 #ifdef ENABLE_FOLD_CHECKING
15007 md5_init_ctx (&ctx);
15008 fold_checksum_tree (op0, &ctx, ht);
15009 md5_finish_ctx (&ctx, checksum_after_op0);
15010 ht.empty ();
15011
15012 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15013 fold_check_failed (op0, tem);
15014
15015 md5_init_ctx (&ctx);
15016 fold_checksum_tree (op1, &ctx, ht);
15017 md5_finish_ctx (&ctx, checksum_after_op1);
15018 ht.dispose ();
15019
15020 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15021 fold_check_failed (op1, tem);
15022 #endif
15023 return tem;
15024 }
15025
15026 /* Fold a ternary tree expression with code CODE of type TYPE with
15027 operands OP0, OP1, and OP2. Return a folded expression if
15028 successful. Otherwise, return a tree expression with code CODE of
15029 type TYPE with operands OP0, OP1, and OP2. */
15030
15031 tree
15032 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15033 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15034 {
15035 tree tem;
15036 #ifdef ENABLE_FOLD_CHECKING
15037 unsigned char checksum_before_op0[16],
15038 checksum_before_op1[16],
15039 checksum_before_op2[16],
15040 checksum_after_op0[16],
15041 checksum_after_op1[16],
15042 checksum_after_op2[16];
15043 struct md5_ctx ctx;
15044 hash_table <pointer_hash <tree_node> > ht;
15045
15046 ht.create (32);
15047 md5_init_ctx (&ctx);
15048 fold_checksum_tree (op0, &ctx, ht);
15049 md5_finish_ctx (&ctx, checksum_before_op0);
15050 ht.empty ();
15051
15052 md5_init_ctx (&ctx);
15053 fold_checksum_tree (op1, &ctx, ht);
15054 md5_finish_ctx (&ctx, checksum_before_op1);
15055 ht.empty ();
15056
15057 md5_init_ctx (&ctx);
15058 fold_checksum_tree (op2, &ctx, ht);
15059 md5_finish_ctx (&ctx, checksum_before_op2);
15060 ht.empty ();
15061 #endif
15062
15063 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15064 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15065 if (!tem)
15066 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15067
15068 #ifdef ENABLE_FOLD_CHECKING
15069 md5_init_ctx (&ctx);
15070 fold_checksum_tree (op0, &ctx, ht);
15071 md5_finish_ctx (&ctx, checksum_after_op0);
15072 ht.empty ();
15073
15074 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15075 fold_check_failed (op0, tem);
15076
15077 md5_init_ctx (&ctx);
15078 fold_checksum_tree (op1, &ctx, ht);
15079 md5_finish_ctx (&ctx, checksum_after_op1);
15080 ht.empty ();
15081
15082 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15083 fold_check_failed (op1, tem);
15084
15085 md5_init_ctx (&ctx);
15086 fold_checksum_tree (op2, &ctx, ht);
15087 md5_finish_ctx (&ctx, checksum_after_op2);
15088 ht.dispose ();
15089
15090 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15091 fold_check_failed (op2, tem);
15092 #endif
15093 return tem;
15094 }
15095
15096 /* Fold a CALL_EXPR expression of type TYPE calling FN with the NARGS
15097 arguments in ARGARRAY, and a null static chain.
15098 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15099 of type TYPE from the given operands as constructed by build_call_array. */
15100
15101 tree
15102 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15103 int nargs, tree *argarray)
15104 {
15105 tree tem;
15106 #ifdef ENABLE_FOLD_CHECKING
15107 unsigned char checksum_before_fn[16],
15108 checksum_before_arglist[16],
15109 checksum_after_fn[16],
15110 checksum_after_arglist[16];
15111 struct md5_ctx ctx;
15112 hash_table <pointer_hash <tree_node> > ht;
15113 int i;
15114
15115 ht.create (32);
15116 md5_init_ctx (&ctx);
15117 fold_checksum_tree (fn, &ctx, ht);
15118 md5_finish_ctx (&ctx, checksum_before_fn);
15119 ht.empty ();
15120
15121 md5_init_ctx (&ctx);
15122 for (i = 0; i < nargs; i++)
15123 fold_checksum_tree (argarray[i], &ctx, ht);
15124 md5_finish_ctx (&ctx, checksum_before_arglist);
15125 ht.empty ();
15126 #endif
15127
15128 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15129
15130 #ifdef ENABLE_FOLD_CHECKING
15131 md5_init_ctx (&ctx);
15132 fold_checksum_tree (fn, &ctx, ht);
15133 md5_finish_ctx (&ctx, checksum_after_fn);
15134 ht.empty ();
15135
15136 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15137 fold_check_failed (fn, tem);
15138
15139 md5_init_ctx (&ctx);
15140 for (i = 0; i < nargs; i++)
15141 fold_checksum_tree (argarray[i], &ctx, ht);
15142 md5_finish_ctx (&ctx, checksum_after_arglist);
15143 ht.dispose ();
15144
15145 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15146 fold_check_failed (NULL_TREE, tem);
15147 #endif
15148 return tem;
15149 }
15150
15151 /* Perform constant folding and related simplification of initializer
15152 expression EXPR. These behave identically to "fold_buildN" but ignore
15153 potential run-time traps and exceptions that fold must preserve. */
15154
15155 #define START_FOLD_INIT \
15156 int saved_signaling_nans = flag_signaling_nans;\
15157 int saved_trapping_math = flag_trapping_math;\
15158 int saved_rounding_math = flag_rounding_math;\
15159 int saved_trapv = flag_trapv;\
15160 int saved_folding_initializer = folding_initializer;\
15161 flag_signaling_nans = 0;\
15162 flag_trapping_math = 0;\
15163 flag_rounding_math = 0;\
15164 flag_trapv = 0;\
15165 folding_initializer = 1;
15166
15167 #define END_FOLD_INIT \
15168 flag_signaling_nans = saved_signaling_nans;\
15169 flag_trapping_math = saved_trapping_math;\
15170 flag_rounding_math = saved_rounding_math;\
15171 flag_trapv = saved_trapv;\
15172 folding_initializer = saved_folding_initializer;
15173
15174 tree
15175 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15176 tree type, tree op)
15177 {
15178 tree result;
15179 START_FOLD_INIT;
15180
15181 result = fold_build1_loc (loc, code, type, op);
15182
15183 END_FOLD_INIT;
15184 return result;
15185 }
15186
15187 tree
15188 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15189 tree type, tree op0, tree op1)
15190 {
15191 tree result;
15192 START_FOLD_INIT;
15193
15194 result = fold_build2_loc (loc, code, type, op0, op1);
15195
15196 END_FOLD_INIT;
15197 return result;
15198 }
15199
15200 tree
15201 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15202 int nargs, tree *argarray)
15203 {
15204 tree result;
15205 START_FOLD_INIT;
15206
15207 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15208
15209 END_FOLD_INIT;
15210 return result;
15211 }
15212
15213 #undef START_FOLD_INIT
15214 #undef END_FOLD_INIT
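
/* For example, under -frounding-math fold normally refuses to fold
   1.0/3.0, since the run-time rounding mode is unknown; a static
   initializer nevertheless needs a compile-time value, so (a sketch,
   with ONE and THREE hypothetical REAL_CSTs of double_type_node)

     fold_build2_initializer_loc (loc, RDIV_EXPR, double_type_node,
                                  one, three);

   folds to a REAL_CST because the flags above are temporarily cleared
   around the call.  */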
15215
15216 /* Determine if the first argument is a multiple of the second argument.
15217 Return 0 if it is not, or if we cannot easily determine it to be.
15218
15219 An example of the sort of thing we care about (at this point; this routine
15220 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15221 fold cases do now) is discovering that
15222
15223 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15224
15225 is a multiple of
15226
15227 SAVE_EXPR (J * 8)
15228
15229 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15230
15231 This code also handles discovering that
15232
15233 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15234
15235 is a multiple of 8 so we don't have to worry about dealing with a
15236 possible remainder.
15237
15238 Note that we *look* inside a SAVE_EXPR only to determine how it was
15239 calculated; it is not safe for fold to do much of anything else with the
15240 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15241 at run time. For example, the latter example above *cannot* be implemented
15242 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15243 evaluation time of the original SAVE_EXPR is not necessarily the same at
15244 the time the new expression is evaluated. The only optimization of this
15245 sort that would be valid is changing
15246
15247 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15248
15249 divided by 8 to
15250
15251 SAVE_EXPR (I) * SAVE_EXPR (J)
15252
15253 (where the same SAVE_EXPR (J) is used in the original and the
15254 transformed version). */
15255
15256 int
15257 multiple_of_p (tree type, const_tree top, const_tree bottom)
15258 {
15259 if (operand_equal_p (top, bottom, 0))
15260 return 1;
15261
15262 if (TREE_CODE (type) != INTEGER_TYPE)
15263 return 0;
15264
15265 switch (TREE_CODE (top))
15266 {
15267 case BIT_AND_EXPR:
15268 /* A bitwise AND can only provide a power-of-two multiple: if the mask
15269 is a multiple of (power-of-two) BOTTOM, then so is TOP. */
15270 if (!integer_pow2p (bottom))
15271 return 0;
15272 /* FALLTHRU */
15273
15274 case MULT_EXPR:
15275 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15276 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15277
15278 case PLUS_EXPR:
15279 case MINUS_EXPR:
15280 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15281 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15282
15283 case LSHIFT_EXPR:
15284 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15285 {
15286 tree op1, t1;
15287
15288 op1 = TREE_OPERAND (top, 1);
15289 /* const_binop may not detect overflow correctly,
15290 so check for it explicitly here. */
15291 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15292 && 0 != (t1 = fold_convert (type,
15293 const_binop (LSHIFT_EXPR,
15294 size_one_node,
15295 op1)))
15296 && !TREE_OVERFLOW (t1))
15297 return multiple_of_p (type, t1, bottom);
15298 }
15299 return 0;
15300
15301 case NOP_EXPR:
15302 /* Can't handle conversions from non-integral or wider integral type. */
15303 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15304 || (TYPE_PRECISION (type)
15305 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15306 return 0;
15307
15308 /* ... fall through ... */
15309
15310 case SAVE_EXPR:
15311 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15312
15313 case COND_EXPR:
15314 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15315 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15316
15317 case INTEGER_CST:
15318 if (TREE_CODE (bottom) != INTEGER_CST
15319 || integer_zerop (bottom)
15320 || (TYPE_UNSIGNED (type)
15321 && (tree_int_cst_sgn (top) < 0
15322 || tree_int_cst_sgn (bottom) < 0)))
15323 return 0;
15324 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15325 top, bottom));
15326
15327 default:
15328 return 0;
15329 }
15330 }
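
/* For example (a sketch; TYPE is a hypothetical 32-bit integer type):

     multiple_of_p (type, build_int_cst (type, 24),
                    build_int_cst (type, 8))      returns 1
     multiple_of_p (type, build_int_cst (type, 20),
                    build_int_cst (type, 8))      returns 0

   and any expression the routine cannot analyze conservatively yields
   0 as well.  */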
15331
15332 /* Return true if CODE or TYPE is known to be non-negative. */
15333
15334 static bool
15335 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15336 {
15337 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15338 && truth_value_p (code))
15339 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15340 have a signed:1 type (where the values are -1 and 0). */
15341 return true;
15342 return false;
15343 }
15344
15345 /* Return true if (CODE OP0) is known to be non-negative. If the return
15346 value is based on the assumption that signed overflow is undefined,
15347 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15348 *STRICT_OVERFLOW_P. */
15349
15350 bool
15351 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15352 bool *strict_overflow_p)
15353 {
15354 if (TYPE_UNSIGNED (type))
15355 return true;
15356
15357 switch (code)
15358 {
15359 case ABS_EXPR:
15360 /* We can't return 1 if flag_wrapv is set because
15361 ABS_EXPR<INT_MIN> = INT_MIN. */
15362 if (!INTEGRAL_TYPE_P (type))
15363 return true;
15364 if (TYPE_OVERFLOW_UNDEFINED (type))
15365 {
15366 *strict_overflow_p = true;
15367 return true;
15368 }
15369 break;
15370
15371 case NON_LVALUE_EXPR:
15372 case FLOAT_EXPR:
15373 case FIX_TRUNC_EXPR:
15374 return tree_expr_nonnegative_warnv_p (op0,
15375 strict_overflow_p);
15376
15377 case NOP_EXPR:
15378 {
15379 tree inner_type = TREE_TYPE (op0);
15380 tree outer_type = type;
15381
15382 if (TREE_CODE (outer_type) == REAL_TYPE)
15383 {
15384 if (TREE_CODE (inner_type) == REAL_TYPE)
15385 return tree_expr_nonnegative_warnv_p (op0,
15386 strict_overflow_p);
15387 if (INTEGRAL_TYPE_P (inner_type))
15388 {
15389 if (TYPE_UNSIGNED (inner_type))
15390 return true;
15391 return tree_expr_nonnegative_warnv_p (op0,
15392 strict_overflow_p);
15393 }
15394 }
15395 else if (INTEGRAL_TYPE_P (outer_type))
15396 {
15397 if (TREE_CODE (inner_type) == REAL_TYPE)
15398 return tree_expr_nonnegative_warnv_p (op0,
15399 strict_overflow_p);
15400 if (INTEGRAL_TYPE_P (inner_type))
15401 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15402 && TYPE_UNSIGNED (inner_type);
15403 }
15404 }
15405 break;
15406
15407 default:
15408 return tree_simple_nonnegative_warnv_p (code, type);
15409 }
15410
15411 /* We don't know sign of `t', so be conservative and return false. */
15412 return false;
15413 }
15414
15415 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15416 value is based on the assumption that signed overflow is undefined,
15417 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15418 *STRICT_OVERFLOW_P. */
15419
15420 bool
15421 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15422 tree op1, bool *strict_overflow_p)
15423 {
15424 if (TYPE_UNSIGNED (type))
15425 return true;
15426
15427 switch (code)
15428 {
15429 case POINTER_PLUS_EXPR:
15430 case PLUS_EXPR:
15431 if (FLOAT_TYPE_P (type))
15432 return (tree_expr_nonnegative_warnv_p (op0,
15433 strict_overflow_p)
15434 && tree_expr_nonnegative_warnv_p (op1,
15435 strict_overflow_p));
15436
15437 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15438 both unsigned and at least 2 bits shorter than the result. */
15439 if (TREE_CODE (type) == INTEGER_TYPE
15440 && TREE_CODE (op0) == NOP_EXPR
15441 && TREE_CODE (op1) == NOP_EXPR)
15442 {
15443 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15444 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15445 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15446 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15447 {
15448 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15449 TYPE_PRECISION (inner2)) + 1;
15450 return prec < TYPE_PRECISION (type);
15451 }
15452 }
15453 break;
15454
15455 case MULT_EXPR:
15456 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15457 {
15458 /* x * x is always non-negative for floating point x
15459 or without overflow. */
15460 if (operand_equal_p (op0, op1, 0)
15461 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15462 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15463 {
15464 if (TYPE_OVERFLOW_UNDEFINED (type))
15465 *strict_overflow_p = true;
15466 return true;
15467 }
15468 }
15469
15470 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15471 both unsigned and their combined precision is less than that of the result. */
15472 if (TREE_CODE (type) == INTEGER_TYPE
15473 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15474 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15475 {
15476 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15477 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15478 : TREE_TYPE (op0);
15479 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15480 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15481 : TREE_TYPE (op1);
15482
15483 bool unsigned0 = TYPE_UNSIGNED (inner0);
15484 bool unsigned1 = TYPE_UNSIGNED (inner1);
15485
15486 if (TREE_CODE (op0) == INTEGER_CST)
15487 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15488
15489 if (TREE_CODE (op1) == INTEGER_CST)
15490 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15491
15492 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15493 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15494 {
15495 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15496 ? tree_int_cst_min_precision (op0, UNSIGNED)
15497 : TYPE_PRECISION (inner0);
15498
15499 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15500 ? tree_int_cst_min_precision (op1, UNSIGNED)
15501 : TYPE_PRECISION (inner1);
15502
15503 return precision0 + precision1 < TYPE_PRECISION (type);
15504 }
15505 }
15506 return false;
15507
15508 case BIT_AND_EXPR:
15509 case MAX_EXPR:
15510 return (tree_expr_nonnegative_warnv_p (op0,
15511 strict_overflow_p)
15512 || tree_expr_nonnegative_warnv_p (op1,
15513 strict_overflow_p));
15514
15515 case BIT_IOR_EXPR:
15516 case BIT_XOR_EXPR:
15517 case MIN_EXPR:
15518 case RDIV_EXPR:
15519 case TRUNC_DIV_EXPR:
15520 case CEIL_DIV_EXPR:
15521 case FLOOR_DIV_EXPR:
15522 case ROUND_DIV_EXPR:
15523 return (tree_expr_nonnegative_warnv_p (op0,
15524 strict_overflow_p)
15525 && tree_expr_nonnegative_warnv_p (op1,
15526 strict_overflow_p));
15527
15528 case TRUNC_MOD_EXPR:
15529 case CEIL_MOD_EXPR:
15530 case FLOOR_MOD_EXPR:
15531 case ROUND_MOD_EXPR:
15532 return tree_expr_nonnegative_warnv_p (op0,
15533 strict_overflow_p);
15534 default:
15535 return tree_simple_nonnegative_warnv_p (code, type);
15536 }
15537
15538 /* We don't know sign of `t', so be conservative and return false. */
15539 return false;
15540 }
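
/* To illustrate the PLUS_EXPR precision test above: in

     int sum = (int) (unsigned char) a + (int) (unsigned char) b;

   each zero-extended operand fits in 8 bits, so the sum fits in 9;
   since 9 < 32 the result can never reach the sign bit and is known
   non-negative.  With 31-bit operands the test 31 + 1 < 32 fails and
   we conservatively conclude nothing.  */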
15541
15542 /* Return true if T is known to be non-negative. If the return
15543 value is based on the assumption that signed overflow is undefined,
15544 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15545 *STRICT_OVERFLOW_P. */
15546
15547 bool
15548 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15549 {
15550 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15551 return true;
15552
15553 switch (TREE_CODE (t))
15554 {
15555 case INTEGER_CST:
15556 return tree_int_cst_sgn (t) >= 0;
15557
15558 case REAL_CST:
15559 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15560
15561 case FIXED_CST:
15562 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15563
15564 case COND_EXPR:
15565 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15566 strict_overflow_p)
15567 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15568 strict_overflow_p));
15569 default:
15570 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15571 TREE_TYPE (t));
15572 }
15573 /* We don't know sign of `t', so be conservative and return false. */
15574 return false;
15575 }
15576
15577 /* Return true if T is known to be non-negative. If the return
15578 value is based on the assumption that signed overflow is undefined,
15579 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15580 *STRICT_OVERFLOW_P. */
15581
15582 bool
15583 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15584 tree arg0, tree arg1, bool *strict_overflow_p)
15585 {
15586 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15587 switch (DECL_FUNCTION_CODE (fndecl))
15588 {
15589 CASE_FLT_FN (BUILT_IN_ACOS):
15590 CASE_FLT_FN (BUILT_IN_ACOSH):
15591 CASE_FLT_FN (BUILT_IN_CABS):
15592 CASE_FLT_FN (BUILT_IN_COSH):
15593 CASE_FLT_FN (BUILT_IN_ERFC):
15594 CASE_FLT_FN (BUILT_IN_EXP):
15595 CASE_FLT_FN (BUILT_IN_EXP10):
15596 CASE_FLT_FN (BUILT_IN_EXP2):
15597 CASE_FLT_FN (BUILT_IN_FABS):
15598 CASE_FLT_FN (BUILT_IN_FDIM):
15599 CASE_FLT_FN (BUILT_IN_HYPOT):
15600 CASE_FLT_FN (BUILT_IN_POW10):
15601 CASE_INT_FN (BUILT_IN_FFS):
15602 CASE_INT_FN (BUILT_IN_PARITY):
15603 CASE_INT_FN (BUILT_IN_POPCOUNT):
15604 CASE_INT_FN (BUILT_IN_CLZ):
15605 CASE_INT_FN (BUILT_IN_CLRSB):
15606 case BUILT_IN_BSWAP32:
15607 case BUILT_IN_BSWAP64:
15608 /* Always true. */
15609 return true;
15610
15611 CASE_FLT_FN (BUILT_IN_SQRT):
15612 /* sqrt(-0.0) is -0.0. */
15613 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15614 return true;
15615 return tree_expr_nonnegative_warnv_p (arg0,
15616 strict_overflow_p);
15617
15618 CASE_FLT_FN (BUILT_IN_ASINH):
15619 CASE_FLT_FN (BUILT_IN_ATAN):
15620 CASE_FLT_FN (BUILT_IN_ATANH):
15621 CASE_FLT_FN (BUILT_IN_CBRT):
15622 CASE_FLT_FN (BUILT_IN_CEIL):
15623 CASE_FLT_FN (BUILT_IN_ERF):
15624 CASE_FLT_FN (BUILT_IN_EXPM1):
15625 CASE_FLT_FN (BUILT_IN_FLOOR):
15626 CASE_FLT_FN (BUILT_IN_FMOD):
15627 CASE_FLT_FN (BUILT_IN_FREXP):
15628 CASE_FLT_FN (BUILT_IN_ICEIL):
15629 CASE_FLT_FN (BUILT_IN_IFLOOR):
15630 CASE_FLT_FN (BUILT_IN_IRINT):
15631 CASE_FLT_FN (BUILT_IN_IROUND):
15632 CASE_FLT_FN (BUILT_IN_LCEIL):
15633 CASE_FLT_FN (BUILT_IN_LDEXP):
15634 CASE_FLT_FN (BUILT_IN_LFLOOR):
15635 CASE_FLT_FN (BUILT_IN_LLCEIL):
15636 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15637 CASE_FLT_FN (BUILT_IN_LLRINT):
15638 CASE_FLT_FN (BUILT_IN_LLROUND):
15639 CASE_FLT_FN (BUILT_IN_LRINT):
15640 CASE_FLT_FN (BUILT_IN_LROUND):
15641 CASE_FLT_FN (BUILT_IN_MODF):
15642 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15643 CASE_FLT_FN (BUILT_IN_RINT):
15644 CASE_FLT_FN (BUILT_IN_ROUND):
15645 CASE_FLT_FN (BUILT_IN_SCALB):
15646 CASE_FLT_FN (BUILT_IN_SCALBLN):
15647 CASE_FLT_FN (BUILT_IN_SCALBN):
15648 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15649 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15650 CASE_FLT_FN (BUILT_IN_SINH):
15651 CASE_FLT_FN (BUILT_IN_TANH):
15652 CASE_FLT_FN (BUILT_IN_TRUNC):
15653 /* True if the 1st argument is nonnegative. */
15654 return tree_expr_nonnegative_warnv_p (arg0,
15655 strict_overflow_p);
15656
15657 CASE_FLT_FN (BUILT_IN_FMAX):
15658 /* True if the 1st OR 2nd arguments are nonnegative. */
15659 return (tree_expr_nonnegative_warnv_p (arg0,
15660 strict_overflow_p)
15661 || (tree_expr_nonnegative_warnv_p (arg1,
15662 strict_overflow_p)));
15663
15664 CASE_FLT_FN (BUILT_IN_FMIN):
15665 /* True if the 1st AND 2nd arguments are nonnegative. */
15666 return (tree_expr_nonnegative_warnv_p (arg0,
15667 strict_overflow_p)
15668 && (tree_expr_nonnegative_warnv_p (arg1,
15669 strict_overflow_p)));
15670
15671 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15672 /* True if the 2nd argument is nonnegative. */
15673 return tree_expr_nonnegative_warnv_p (arg1,
15674 strict_overflow_p);
15675
15676 CASE_FLT_FN (BUILT_IN_POWI):
15677 /* True if the 1st argument is nonnegative or the second
15678 argument is an even integer. */
15679 if (TREE_CODE (arg1) == INTEGER_CST
15680 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15681 return true;
15682 return tree_expr_nonnegative_warnv_p (arg0,
15683 strict_overflow_p);
15684
15685 CASE_FLT_FN (BUILT_IN_POW):
15686 /* True if the 1st argument is nonnegative or the second
15687 argument is an even integer valued real. */
15688 if (TREE_CODE (arg1) == REAL_CST)
15689 {
15690 REAL_VALUE_TYPE c;
15691 HOST_WIDE_INT n;
15692
15693 c = TREE_REAL_CST (arg1);
15694 n = real_to_integer (&c);
15695 if ((n & 1) == 0)
15696 {
15697 REAL_VALUE_TYPE cint;
15698 real_from_integer (&cint, VOIDmode, n, SIGNED);
15699 if (real_identical (&c, &cint))
15700 return true;
15701 }
15702 }
15703 return tree_expr_nonnegative_warnv_p (arg0,
15704 strict_overflow_p);
15705
15706 default:
15707 break;
15708 }
15709 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15710 type);
15711 }
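
/* E.g. pow (x, 2.0) is known non-negative for any x: the exponent is
   a REAL_CST, real_to_integer gives n == 2, (n & 1) == 0, and
   rebuilding 2 as a real compares identical, so the even-power rule
   fires.  pow (x, 2.5) fails the real_identical check and we fall
   back to asking whether x itself is non-negative.  */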
15712
15713 /* Return true if T is known to be non-negative. If the return
15714 value is based on the assumption that signed overflow is undefined,
15715 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15716 *STRICT_OVERFLOW_P. */
15717
15718 static bool
15719 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15720 {
15721 enum tree_code code = TREE_CODE (t);
15722 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15723 return true;
15724
15725 switch (code)
15726 {
15727 case TARGET_EXPR:
15728 {
15729 tree temp = TARGET_EXPR_SLOT (t);
15730 t = TARGET_EXPR_INITIAL (t);
15731
15732 /* If the initializer is non-void, then it's a normal expression
15733 that will be assigned to the slot. */
15734 if (!VOID_TYPE_P (t))
15735 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15736
15737 /* Otherwise, the initializer sets the slot in some way. One common
15738 way is an assignment statement at the end of the initializer. */
15739 while (1)
15740 {
15741 if (TREE_CODE (t) == BIND_EXPR)
15742 t = expr_last (BIND_EXPR_BODY (t));
15743 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15744 || TREE_CODE (t) == TRY_CATCH_EXPR)
15745 t = expr_last (TREE_OPERAND (t, 0));
15746 else if (TREE_CODE (t) == STATEMENT_LIST)
15747 t = expr_last (t);
15748 else
15749 break;
15750 }
15751 if (TREE_CODE (t) == MODIFY_EXPR
15752 && TREE_OPERAND (t, 0) == temp)
15753 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15754 strict_overflow_p);
15755
15756 return false;
15757 }
15758
15759 case CALL_EXPR:
15760 {
15761 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15762 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15763
15764 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15765 get_callee_fndecl (t),
15766 arg0,
15767 arg1,
15768 strict_overflow_p);
15769 }
15770 case COMPOUND_EXPR:
15771 case MODIFY_EXPR:
15772 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15773 strict_overflow_p);
15774 case BIND_EXPR:
15775 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15776 strict_overflow_p);
15777 case SAVE_EXPR:
15778 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15779 strict_overflow_p);
15780
15781 default:
15782 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15783 TREE_TYPE (t));
15784 }
15785
15786 /* We don't know sign of `t', so be conservative and return false. */
15787 return false;
15788 }
15789
15790 /* Return true if T is known to be non-negative. If the return
15791 value is based on the assumption that signed overflow is undefined,
15792 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15793 *STRICT_OVERFLOW_P. */
15794
15795 bool
15796 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15797 {
15798 enum tree_code code;
15799 if (t == error_mark_node)
15800 return false;
15801
15802 code = TREE_CODE (t);
15803 switch (TREE_CODE_CLASS (code))
15804 {
15805 case tcc_binary:
15806 case tcc_comparison:
15807 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15808 TREE_TYPE (t),
15809 TREE_OPERAND (t, 0),
15810 TREE_OPERAND (t, 1),
15811 strict_overflow_p);
15812
15813 case tcc_unary:
15814 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15815 TREE_TYPE (t),
15816 TREE_OPERAND (t, 0),
15817 strict_overflow_p);
15818
15819 case tcc_constant:
15820 case tcc_declaration:
15821 case tcc_reference:
15822 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15823
15824 default:
15825 break;
15826 }
15827
15828 switch (code)
15829 {
15830 case TRUTH_AND_EXPR:
15831 case TRUTH_OR_EXPR:
15832 case TRUTH_XOR_EXPR:
15833 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15834 TREE_TYPE (t),
15835 TREE_OPERAND (t, 0),
15836 TREE_OPERAND (t, 1),
15837 strict_overflow_p);
15838 case TRUTH_NOT_EXPR:
15839 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15840 TREE_TYPE (t),
15841 TREE_OPERAND (t, 0),
15842 strict_overflow_p);
15843
15844 case COND_EXPR:
15845 case CONSTRUCTOR:
15846 case OBJ_TYPE_REF:
15847 case ASSERT_EXPR:
15848 case ADDR_EXPR:
15849 case WITH_SIZE_EXPR:
15850 case SSA_NAME:
15851 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15852
15853 default:
15854 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15855 }
15856 }
15857
15858 /* Return true if `t' is known to be non-negative. Handle warnings
15859 about undefined signed overflow. */
15860
15861 bool
15862 tree_expr_nonnegative_p (tree t)
15863 {
15864 bool ret, strict_overflow_p;
15865
15866 strict_overflow_p = false;
15867 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15868 if (strict_overflow_p)
15869 fold_overflow_warning (("assuming signed overflow does not occur when "
15870 "determining that expression is always "
15871 "non-negative"),
15872 WARN_STRICT_OVERFLOW_MISC);
15873 return ret;
15874 }
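
/* For example, for signed int N the product N * N is only known
   non-negative on the assumption that signed overflow is undefined;
   tree_expr_nonnegative_warnv_p then sets *STRICT_OVERFLOW_P, and
   this wrapper emits the -Wstrict-overflow note before returning
   true.  */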
15875
15876
15877 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15878 For floating point we further ensure that T is not denormal.
15879 Similar logic is present in nonzero_address_p in rtlanal.c.
15880
15881 If the return value is based on the assumption that signed overflow
15882 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15883 change *STRICT_OVERFLOW_P. */
15884
15885 bool
15886 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15887 bool *strict_overflow_p)
15888 {
15889 switch (code)
15890 {
15891 case ABS_EXPR:
15892 return tree_expr_nonzero_warnv_p (op0,
15893 strict_overflow_p);
15894
15895 case NOP_EXPR:
15896 {
15897 tree inner_type = TREE_TYPE (op0);
15898 tree outer_type = type;
15899
15900 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15901 && tree_expr_nonzero_warnv_p (op0,
15902 strict_overflow_p));
15903 }
15904 break;
15905
15906 case NON_LVALUE_EXPR:
15907 return tree_expr_nonzero_warnv_p (op0,
15908 strict_overflow_p);
15909
15910 default:
15911 break;
15912 }
15913
15914 return false;
15915 }
15916
15917 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15918 For floating point we further ensure that T is not denormal.
15919 Similar logic is present in nonzero_address_p in rtlanal.c.
15920
15921 If the return value is based on the assumption that signed overflow
15922 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15923 change *STRICT_OVERFLOW_P. */
15924
15925 bool
15926 tree_binary_nonzero_warnv_p (enum tree_code code,
15927 tree type,
15928 tree op0,
15929 tree op1, bool *strict_overflow_p)
15930 {
15931 bool sub_strict_overflow_p;
15932 switch (code)
15933 {
15934 case POINTER_PLUS_EXPR:
15935 case PLUS_EXPR:
15936 if (TYPE_OVERFLOW_UNDEFINED (type))
15937 {
15938 /* In the presence of negative values it is hard
15939 to say anything definite. */
15940 sub_strict_overflow_p = false;
15941 if (!tree_expr_nonnegative_warnv_p (op0,
15942 &sub_strict_overflow_p)
15943 || !tree_expr_nonnegative_warnv_p (op1,
15944 &sub_strict_overflow_p))
15945 return false;
15946 /* One of the operands must be positive and the other non-negative. */
15947 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15948 overflows, on a two's-complement machine the sum of two
15949 nonnegative numbers can never be zero. */
15950 return (tree_expr_nonzero_warnv_p (op0,
15951 strict_overflow_p)
15952 || tree_expr_nonzero_warnv_p (op1,
15953 strict_overflow_p));
15954 }
15955 break;
15956
15957 case MULT_EXPR:
15958 if (TYPE_OVERFLOW_UNDEFINED (type))
15959 {
15960 if (tree_expr_nonzero_warnv_p (op0,
15961 strict_overflow_p)
15962 && tree_expr_nonzero_warnv_p (op1,
15963 strict_overflow_p))
15964 {
15965 *strict_overflow_p = true;
15966 return true;
15967 }
15968 }
15969 break;
15970
15971 case MIN_EXPR:
15972 sub_strict_overflow_p = false;
15973 if (tree_expr_nonzero_warnv_p (op0,
15974 &sub_strict_overflow_p)
15975 && tree_expr_nonzero_warnv_p (op1,
15976 &sub_strict_overflow_p))
15977 {
15978 if (sub_strict_overflow_p)
15979 *strict_overflow_p = true;
15980 }
15981 break;
15982
15983 case MAX_EXPR:
15984 sub_strict_overflow_p = false;
15985 if (tree_expr_nonzero_warnv_p (op0,
15986 &sub_strict_overflow_p))
15987 {
15988 if (sub_strict_overflow_p)
15989 *strict_overflow_p = true;
15990
15991 /* When both operands are nonzero, then MAX must be too. */
15992 if (tree_expr_nonzero_warnv_p (op1,
15993 strict_overflow_p))
15994 return true;
15995
15996 /* MAX where operand 0 is positive is positive. */
15997 return tree_expr_nonnegative_warnv_p (op0,
15998 strict_overflow_p);
15999 }
16000 /* MAX where operand 1 is positive is positive. */
16001 else if (tree_expr_nonzero_warnv_p (op1,
16002 &sub_strict_overflow_p)
16003 && tree_expr_nonnegative_warnv_p (op1,
16004 &sub_strict_overflow_p))
16005 {
16006 if (sub_strict_overflow_p)
16007 *strict_overflow_p = true;
16008 return true;
16009 }
16010 break;
16011
16012 case BIT_IOR_EXPR:
16013 return (tree_expr_nonzero_warnv_p (op1,
16014 strict_overflow_p)
16015 || tree_expr_nonzero_warnv_p (op0,
16016 strict_overflow_p));
16017
16018 default:
16019 break;
16020 }
16021
16022 return false;
16023 }
16024
16025 /* Return true when T is an address and is known to be nonzero.
16026 For floating point we further ensure that T is not denormal.
16027 Similar logic is present in nonzero_address_p in rtlanal.c.
16028
16029 If the return value is based on the assumption that signed overflow
16030 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16031 change *STRICT_OVERFLOW_P. */
16032
16033 bool
16034 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16035 {
16036 bool sub_strict_overflow_p;
16037 switch (TREE_CODE (t))
16038 {
16039 case INTEGER_CST:
16040 return !integer_zerop (t);
16041
16042 case ADDR_EXPR:
16043 {
16044 tree base = TREE_OPERAND (t, 0);
16045 if (!DECL_P (base))
16046 base = get_base_address (base);
16047
16048 if (!base)
16049 return false;
16050
16051 /* Weak declarations may link to NULL. Other things may also be NULL,
16052 so protect them with -fdelete-null-pointer-checks; variables
16053 allocated on the stack, however, can never be NULL. */
16054 if (DECL_P (base)
16055 && (flag_delete_null_pointer_checks
16056 || (DECL_CONTEXT (base)
16057 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16058 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16059 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16060
16061 /* Constants are never weak. */
16062 if (CONSTANT_CLASS_P (base))
16063 return true;
16064
16065 return false;
16066 }
16067
16068 case COND_EXPR:
16069 sub_strict_overflow_p = false;
16070 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16071 &sub_strict_overflow_p)
16072 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16073 &sub_strict_overflow_p))
16074 {
16075 if (sub_strict_overflow_p)
16076 *strict_overflow_p = true;
16077 return true;
16078 }
16079 break;
16080
16081 default:
16082 break;
16083 }
16084 return false;
16085 }
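
/* For example, given

     extern int w __attribute__ ((weak));
     void f (void) { int l; ... &l ... &w ... }

   &l is known nonzero (a stack variable always has an address), while
   &w is not: a weak symbol left undefined at link time resolves to a
   null address.  Ordinary globals are only assumed nonzero under
   -fdelete-null-pointer-checks.  */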
16086
16087 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16088 attempt to fold the expression to a constant without modifying TYPE,
16089 OP0 or OP1.
16090
16091 If the expression could be simplified to a constant, then return
16092 the constant. If the expression would not be simplified to a
16093 constant, then return NULL_TREE. */
16094
16095 tree
16096 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16097 {
16098 tree tem = fold_binary (code, type, op0, op1);
16099 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16100 }
16101
16102 /* Given the components of a unary expression CODE, TYPE and OP0,
16103 attempt to fold the expression to a constant without modifying
16104 TYPE or OP0.
16105
16106 If the expression could be simplified to a constant, then return
16107 the constant. If the expression would not be simplified to a
16108 constant, then return NULL_TREE. */
16109
16110 tree
16111 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16112 {
16113 tree tem = fold_unary (code, type, op0);
16114 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16115 }
16116
16117 /* If EXP represents referencing an element in a constant string
16118 (either via pointer arithmetic or array indexing), return the
16119 tree representing the value accessed, otherwise return NULL. */
16120
16121 tree
16122 fold_read_from_constant_string (tree exp)
16123 {
16124 if ((TREE_CODE (exp) == INDIRECT_REF
16125 || TREE_CODE (exp) == ARRAY_REF)
16126 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16127 {
16128 tree exp1 = TREE_OPERAND (exp, 0);
16129 tree index;
16130 tree string;
16131 location_t loc = EXPR_LOCATION (exp);
16132
16133 if (TREE_CODE (exp) == INDIRECT_REF)
16134 string = string_constant (exp1, &index);
16135 else
16136 {
16137 tree low_bound = array_ref_low_bound (exp);
16138 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16139
16140 /* Optimize the special-case of a zero lower bound.
16141
16142 We convert the low_bound to sizetype to avoid some problems
16143 with constant folding. (E.g. suppose the lower bound is 1,
16144 and its mode is QI. Without the conversion, (ARRAY
16145 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16146 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16147 if (! integer_zerop (low_bound))
16148 index = size_diffop_loc (loc, index,
16149 fold_convert_loc (loc, sizetype, low_bound));
16150
16151 string = exp1;
16152 }
16153
16154 if (string
16155 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16156 && TREE_CODE (string) == STRING_CST
16157 && TREE_CODE (index) == INTEGER_CST
16158 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16159 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16160 == MODE_INT)
16161 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16162 return build_int_cst_type (TREE_TYPE (exp),
16163 (TREE_STRING_POINTER (string)
16164 [TREE_INT_CST_LOW (index)]));
16165 }
16166 return NULL;
16167 }
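
/* For example, the ARRAY_REF "abc"[1] passes all of the checks above
   (single-byte integer mode, constant index 1 below the string length
   of 4), so it folds to the character constant 'b' in the type of the
   access.  */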
16168
16169 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16170 an integer constant, real, or fixed-point constant.
16171
16172 TYPE is the type of the result. */
16173
16174 static tree
16175 fold_negate_const (tree arg0, tree type)
16176 {
16177 tree t = NULL_TREE;
16178
16179 switch (TREE_CODE (arg0))
16180 {
16181 case INTEGER_CST:
16182 {
16183 bool overflow;
16184 wide_int val = wi::neg (arg0, &overflow);
16185 t = force_fit_type (type, val, 1,
16186 (overflow | TREE_OVERFLOW (arg0))
16187 && !TYPE_UNSIGNED (type));
16188 break;
16189 }
16190
16191 case REAL_CST:
16192 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16193 break;
16194
16195 case FIXED_CST:
16196 {
16197 FIXED_VALUE_TYPE f;
16198 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16199 &(TREE_FIXED_CST (arg0)), NULL,
16200 TYPE_SATURATING (type));
16201 t = build_fixed (type, f);
16202 /* Propagate overflow flags. */
16203 if (overflow_p | TREE_OVERFLOW (arg0))
16204 TREE_OVERFLOW (t) = 1;
16205 break;
16206 }
16207
16208 default:
16209 gcc_unreachable ();
16210 }
16211
16212 return t;
16213 }
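
/* For example, negating INT_MIN in a signed 32-bit type wraps back to
   INT_MIN: wi::neg reports the overflow and force_fit_type marks the
   result, so for (a sketch)

     tree t = fold_negate_const (build_int_cst (integer_type_node,
                                                INT_MIN),
                                 integer_type_node);

   TREE_OVERFLOW (t) is set on the returned constant.  */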
16214
16215 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16216 an integer constant or real constant.
16217
16218 TYPE is the type of the result. */
16219
16220 tree
16221 fold_abs_const (tree arg0, tree type)
16222 {
16223 tree t = NULL_TREE;
16224
16225 switch (TREE_CODE (arg0))
16226 {
16227 case INTEGER_CST:
16228 {
16229 /* If the value is unsigned or non-negative, then the absolute value
16230 is the same as the ordinary value. */
16231 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
16232 t = arg0;
16233
16234 /* If the value is negative, then the absolute value is
16235 its negation. */
16236 else
16237 {
16238 bool overflow;
16239 wide_int val = wi::neg (arg0, &overflow);
16240 t = force_fit_type (type, val, -1,
16241 overflow | TREE_OVERFLOW (arg0));
16242 }
16243 }
16244 break;
16245
16246 case REAL_CST:
16247 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16248 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16249 else
16250 t = arg0;
16251 break;
16252
16253 default:
16254 gcc_unreachable ();
16255 }
16256
16257 return t;
16258 }
16259
16260 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16261 constant. TYPE is the type of the result. */
16262
16263 static tree
16264 fold_not_const (const_tree arg0, tree type)
16265 {
16266 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16267
16268 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
16269 }
16270
16271 /* Given CODE, a relational operator, the target type, TYPE and two
16272 constant operands OP0 and OP1, return the result of the
16273 relational operation. If the result is not a compile time
16274 constant, then return NULL_TREE. */
16275
16276 static tree
16277 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16278 {
16279 int result, invert;
16280
16281 /* From here on, the only cases we handle are when the result is
16282 known to be a constant. */
16283
16284 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16285 {
16286 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16287 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16288
16289 /* Handle the cases where either operand is a NaN. */
16290 if (real_isnan (c0) || real_isnan (c1))
16291 {
16292 switch (code)
16293 {
16294 case EQ_EXPR:
16295 case ORDERED_EXPR:
16296 result = 0;
16297 break;
16298
16299 case NE_EXPR:
16300 case UNORDERED_EXPR:
16301 case UNLT_EXPR:
16302 case UNLE_EXPR:
16303 case UNGT_EXPR:
16304 case UNGE_EXPR:
16305 case UNEQ_EXPR:
16306 result = 1;
16307 break;
16308
16309 case LT_EXPR:
16310 case LE_EXPR:
16311 case GT_EXPR:
16312 case GE_EXPR:
16313 case LTGT_EXPR:
16314 if (flag_trapping_math)
16315 return NULL_TREE;
16316 result = 0;
16317 break;
16318
16319 default:
16320 gcc_unreachable ();
16321 }
16322
16323 return constant_boolean_node (result, type);
16324 }
16325
16326 return constant_boolean_node (real_compare (code, c0, c1), type);
16327 }
16328
16329 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16330 {
16331 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16332 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16333 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16334 }
16335
16336 /* Handle equality/inequality of complex constants. */
16337 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16338 {
16339 tree rcond = fold_relational_const (code, type,
16340 TREE_REALPART (op0),
16341 TREE_REALPART (op1));
16342 tree icond = fold_relational_const (code, type,
16343 TREE_IMAGPART (op0),
16344 TREE_IMAGPART (op1));
16345 if (code == EQ_EXPR)
16346 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16347 else if (code == NE_EXPR)
16348 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16349 else
16350 return NULL_TREE;
16351 }
16352
16353 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16354 {
16355 unsigned count = VECTOR_CST_NELTS (op0);
16356 tree *elts = XALLOCAVEC (tree, count);
16357 gcc_assert (VECTOR_CST_NELTS (op1) == count
16358 && TYPE_VECTOR_SUBPARTS (type) == count);
16359
16360 for (unsigned i = 0; i < count; i++)
16361 {
16362 tree elem_type = TREE_TYPE (type);
16363 tree elem0 = VECTOR_CST_ELT (op0, i);
16364 tree elem1 = VECTOR_CST_ELT (op1, i);
16365
16366 tree tem = fold_relational_const (code, elem_type,
16367 elem0, elem1);
16368
16369 if (tem == NULL_TREE)
16370 return NULL_TREE;
16371
16372 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16373 }
16374
16375 return build_vector (type, elts);
16376 }
16377
16378 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16379
16380 To compute GT, swap the arguments and do LT.
16381 To compute GE, do LT and invert the result.
16382 To compute LE, swap the arguments, do LT and invert the result.
16383 To compute NE, do EQ and invert the result.
16384
16385 Therefore, the code below must handle only EQ and LT. */
16386
16387 if (code == LE_EXPR || code == GT_EXPR)
16388 {
16389 tree tem = op0;
16390 op0 = op1;
16391 op1 = tem;
16392 code = swap_tree_comparison (code);
16393 }
16394
16395 /* Note that it is safe to invert for real values here because we
16396 have already handled the one case where it matters. */
16397
16398 invert = 0;
16399 if (code == NE_EXPR || code == GE_EXPR)
16400 {
16401 invert = 1;
16402 code = invert_tree_comparison (code, false);
16403 }
16404
16405 /* Compute a result for LT or EQ if the arguments permit;
16406 otherwise return NULL_TREE. */
16407 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16408 {
16409 if (code == EQ_EXPR)
16410 result = tree_int_cst_equal (op0, op1);
16411 else
16412 result = INT_CST_LT (op0, op1);
16413 }
16414 else
16415 return NULL_TREE;
16416
16417 if (invert)
16418 result ^= 1;
16419 return constant_boolean_node (result, type);
16420 }
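
/* For example, with a NaN operand the ordered comparison NAN < 1.0
   folds to false only when -fno-trapping-math is in effect, because a
   signalling comparison on a NaN must otherwise be preserved so it
   can raise FE_INVALID at run time; the unordered NAN != 1.0 always
   folds to true.  */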
16421
16422 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16423 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16424 itself. */
16425
16426 tree
16427 fold_build_cleanup_point_expr (tree type, tree expr)
16428 {
16429 /* If the expression does not have side effects then we don't have to wrap
16430 it with a cleanup point expression. */
16431 if (!TREE_SIDE_EFFECTS (expr))
16432 return expr;
16433
16434 /* If the expression is a return, check whether the expression inside the
16435 return, or the right-hand side of the MODIFY_EXPR it contains, is free
16436 of side effects. If either is, we don't need to wrap the expression in
16437 a cleanup point expression. Note we don't check the left-hand side of
16438 the modify because it should always be a return decl. */
16439 if (TREE_CODE (expr) == RETURN_EXPR)
16440 {
16441 tree op = TREE_OPERAND (expr, 0);
16442 if (!op || !TREE_SIDE_EFFECTS (op))
16443 return expr;
16444 op = TREE_OPERAND (op, 1);
16445 if (!TREE_SIDE_EFFECTS (op))
16446 return expr;
16447 }
16448
16449 return build1 (CLEANUP_POINT_EXPR, type, expr);
16450 }
16451
16452 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16453 of an indirection through OP0, or NULL_TREE if no simplification is
16454 possible. */
16455
16456 tree
16457 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16458 {
16459 tree sub = op0;
16460 tree subtype;
16461
16462 STRIP_NOPS (sub);
16463 subtype = TREE_TYPE (sub);
16464 if (!POINTER_TYPE_P (subtype))
16465 return NULL_TREE;
16466
16467 if (TREE_CODE (sub) == ADDR_EXPR)
16468 {
16469 tree op = TREE_OPERAND (sub, 0);
16470 tree optype = TREE_TYPE (op);
16471 /* *&CONST_DECL -> to the value of the const decl. */
16472 if (TREE_CODE (op) == CONST_DECL)
16473 return DECL_INITIAL (op);
16474 /* *&p => p; make sure to handle *&"str"[cst] here. */
16475 if (type == optype)
16476 {
16477 tree fop = fold_read_from_constant_string (op);
16478 if (fop)
16479 return fop;
16480 else
16481 return op;
16482 }
16483 /* *(foo *)&fooarray => fooarray[0] */
16484 else if (TREE_CODE (optype) == ARRAY_TYPE
16485 && type == TREE_TYPE (optype)
16486 && (!in_gimple_form
16487 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16488 {
16489 tree type_domain = TYPE_DOMAIN (optype);
16490 tree min_val = size_zero_node;
16491 if (type_domain && TYPE_MIN_VALUE (type_domain))
16492 min_val = TYPE_MIN_VALUE (type_domain);
16493 if (in_gimple_form
16494 && TREE_CODE (min_val) != INTEGER_CST)
16495 return NULL_TREE;
16496 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16497 NULL_TREE, NULL_TREE);
16498 }
16499 /* *(foo *)&complexfoo => __real__ complexfoo */
16500 else if (TREE_CODE (optype) == COMPLEX_TYPE
16501 && type == TREE_TYPE (optype))
16502 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16503 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16504 else if (TREE_CODE (optype) == VECTOR_TYPE
16505 && type == TREE_TYPE (optype))
16506 {
16507 tree part_width = TYPE_SIZE (type);
16508 tree index = bitsize_int (0);
16509 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16510 }
16511 }
16512
16513 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16514 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16515 {
16516 tree op00 = TREE_OPERAND (sub, 0);
16517 tree op01 = TREE_OPERAND (sub, 1);
16518
16519 STRIP_NOPS (op00);
16520 if (TREE_CODE (op00) == ADDR_EXPR)
16521 {
16522 tree op00type;
16523 op00 = TREE_OPERAND (op00, 0);
16524 op00type = TREE_TYPE (op00);
16525
16526 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16527 if (TREE_CODE (op00type) == VECTOR_TYPE
16528 && type == TREE_TYPE (op00type))
16529 {
16530 HOST_WIDE_INT offset = tree_to_shwi (op01);
16531 tree part_width = TYPE_SIZE (type);
16532 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
16533 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16534 tree index = bitsize_int (indexi);
16535
16536 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16537 return fold_build3_loc (loc,
16538 BIT_FIELD_REF, type, op00,
16539 part_width, index);
16540
16541 }
16542 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16543 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16544 && type == TREE_TYPE (op00type))
16545 {
16546 tree size = TYPE_SIZE_UNIT (type);
16547 if (tree_int_cst_equal (size, op01))
16548 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16549 }
16550 /* ((foo *)&fooarray)[1] => fooarray[1] */
16551 else if (TREE_CODE (op00type) == ARRAY_TYPE
16552 && type == TREE_TYPE (op00type))
16553 {
16554 tree type_domain = TYPE_DOMAIN (op00type);
16555 tree min_val = size_zero_node;
16556 if (type_domain && TYPE_MIN_VALUE (type_domain))
16557 min_val = TYPE_MIN_VALUE (type_domain);
16558 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16559 TYPE_SIZE_UNIT (type));
16560 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16561 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16562 NULL_TREE, NULL_TREE);
16563 }
16564 }
16565 }
16566
16567 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16568 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16569 && type == TREE_TYPE (TREE_TYPE (subtype))
16570 && (!in_gimple_form
16571 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16572 {
16573 tree type_domain;
16574 tree min_val = size_zero_node;
16575 sub = build_fold_indirect_ref_loc (loc, sub);
16576 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16577 if (type_domain && TYPE_MIN_VALUE (type_domain))
16578 min_val = TYPE_MIN_VALUE (type_domain);
16579 if (in_gimple_form
16580 && TREE_CODE (min_val) != INTEGER_CST)
16581 return NULL_TREE;
16582 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16583 NULL_TREE);
16584 }
16585
16586 return NULL_TREE;
16587 }
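
/* The transformations above correspond to source-level rewrites such
   as (a sketch, given "int a[4]; _Complex double c;"):

     *(int *) &a           =>  a[0]
     *(double *) &c        =>  __real__ c
     *((int *) &a + 1)     =>  a[1]

   each applied only when the referenced type matches the element
   type, so no reinterpretation of memory is introduced.  */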
16588
16589 /* Builds an expression for an indirection through T, simplifying some
16590 cases. */
16591
16592 tree
16593 build_fold_indirect_ref_loc (location_t loc, tree t)
16594 {
16595 tree type = TREE_TYPE (TREE_TYPE (t));
16596 tree sub = fold_indirect_ref_1 (loc, type, t);
16597
16598 if (sub)
16599 return sub;
16600
16601 return build1_loc (loc, INDIRECT_REF, type, t);
16602 }
16603
16604 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16605
16606 tree
16607 fold_indirect_ref_loc (location_t loc, tree t)
16608 {
16609 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16610
16611 if (sub)
16612 return sub;
16613 else
16614 return t;
16615 }
16616
16617 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16618 whose result is ignored. The type of the returned tree need not be
16619 the same as the original expression. */
16620
16621 tree
16622 fold_ignored_result (tree t)
16623 {
16624 if (!TREE_SIDE_EFFECTS (t))
16625 return integer_zero_node;
16626
16627 for (;;)
16628 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16629 {
16630 case tcc_unary:
16631 t = TREE_OPERAND (t, 0);
16632 break;
16633
16634 case tcc_binary:
16635 case tcc_comparison:
16636 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16637 t = TREE_OPERAND (t, 0);
16638 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16639 t = TREE_OPERAND (t, 1);
16640 else
16641 return t;
16642 break;
16643
16644 case tcc_expression:
16645 switch (TREE_CODE (t))
16646 {
16647 case COMPOUND_EXPR:
16648 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16649 return t;
16650 t = TREE_OPERAND (t, 0);
16651 break;
16652
16653 case COND_EXPR:
16654 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16655 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16656 return t;
16657 t = TREE_OPERAND (t, 0);
16658 break;
16659
16660 default:
16661 return t;
16662 }
16663 break;
16664
16665 default:
16666 return t;
16667 }
16668 }
16669
16670 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16671
16672 tree
16673 round_up_loc (location_t loc, tree value, int divisor)
16674 {
16675 tree div = NULL_TREE;
16676
16677 gcc_assert (divisor > 0);
16678 if (divisor == 1)
16679 return value;
16680
16681 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16682 have to do anything. Only do this when we are not given a constant,
16683 because for a constant this check is more expensive than simply
16684 performing the rounding. */
16685 if (TREE_CODE (value) != INTEGER_CST)
16686 {
16687 div = build_int_cst (TREE_TYPE (value), divisor);
16688
16689 if (multiple_of_p (TREE_TYPE (value), value, div))
16690 return value;
16691 }
16692
16693 /* If divisor is a power of two, simplify this to bit manipulation. */
16694 if (divisor == (divisor & -divisor))
16695 {
16696 if (TREE_CODE (value) == INTEGER_CST)
16697 {
16698 wide_int val = value;
16699 bool overflow_p;
16700
16701 if ((val & (divisor - 1)) == 0)
16702 return value;
16703
16704 overflow_p = TREE_OVERFLOW (value);
16705 val &= ~(divisor - 1);
16706 val += divisor;
16707 if (val == 0)
16708 overflow_p = true;
16709
16710 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16711 }
16712 else
16713 {
16714 tree t;
16715
16716 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16717 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16718 t = build_int_cst (TREE_TYPE (value), -divisor);
16719 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16720 }
16721 }
16722 else
16723 {
16724 if (!div)
16725 div = build_int_cst (TREE_TYPE (value), divisor);
16726 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16727 value = size_binop_loc (loc, MULT_EXPR, value, div);
16728 }
16729
16730 return value;
16731 }
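
/* For a power-of-two DIVISOR the rounding above is the classic bit
   trick

     rounded = (value + divisor - 1) & -divisor;

   e.g. rounding 13 up to a multiple of 8 computes (13 + 7) & -8 = 16.
   In two's complement -divisor equals ~(divisor - 1), which clears
   exactly the low log2(divisor) bits.  */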
16732
16733 /* Likewise, but round down. */
16734
16735 tree
16736 round_down_loc (location_t loc, tree value, int divisor)
16737 {
16738 tree div = NULL_TREE;
16739
16740 gcc_assert (divisor > 0);
16741 if (divisor == 1)
16742 return value;
16743
16744 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16745 have to do anything. Only do this when we are not given a constant,
16746 because for a constant this check is more expensive than simply
16747 performing the rounding. */
16748 if (TREE_CODE (value) != INTEGER_CST)
16749 {
16750 div = build_int_cst (TREE_TYPE (value), divisor);
16751
16752 if (multiple_of_p (TREE_TYPE (value), value, div))
16753 return value;
16754 }
16755
16756 /* If divisor is a power of two, simplify this to bit manipulation. */
16757 if (divisor == (divisor & -divisor))
16758 {
16759 tree t;
16760
16761 t = build_int_cst (TREE_TYPE (value), -divisor);
16762 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16763 }
16764 else
16765 {
16766 if (!div)
16767 div = build_int_cst (TREE_TYPE (value), divisor);
16768 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16769 value = size_binop_loc (loc, MULT_EXPR, value, div);
16770 }
16771
16772 return value;
16773 }
16774
16775 /* Returns the pointer to the base of the object addressed by EXP and
16776 extracts the information about the offset of the access, storing it
16777 in PBITPOS and POFFSET. */
16778
16779 static tree
16780 split_address_to_core_and_offset (tree exp,
16781 HOST_WIDE_INT *pbitpos, tree *poffset)
16782 {
16783 tree core;
16784 enum machine_mode mode;
16785 int unsignedp, volatilep;
16786 HOST_WIDE_INT bitsize;
16787 location_t loc = EXPR_LOCATION (exp);
16788
16789 if (TREE_CODE (exp) == ADDR_EXPR)
16790 {
16791 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16792 poffset, &mode, &unsignedp, &volatilep,
16793 false);
16794 core = build_fold_addr_expr_loc (loc, core);
16795 }
16796 else
16797 {
16798 core = exp;
16799 *pbitpos = 0;
16800 *poffset = NULL_TREE;
16801 }
16802
16803 return core;
16804 }
16805
16806 /* Returns true if addresses of E1 and E2 differ by a constant, false
16807 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16808
16809 bool
16810 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16811 {
16812 tree core1, core2;
16813 HOST_WIDE_INT bitpos1, bitpos2;
16814 tree toffset1, toffset2, tdiff, type;
16815
16816 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16817 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16818
16819 if (bitpos1 % BITS_PER_UNIT != 0
16820 || bitpos2 % BITS_PER_UNIT != 0
16821 || !operand_equal_p (core1, core2, 0))
16822 return false;
16823
16824 if (toffset1 && toffset2)
16825 {
16826 type = TREE_TYPE (toffset1);
16827 if (type != TREE_TYPE (toffset2))
16828 toffset2 = fold_convert (type, toffset2);
16829
16830 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16831 if (!cst_and_fits_in_hwi (tdiff))
16832 return false;
16833
16834 *diff = int_cst_value (tdiff);
16835 }
16836 else if (toffset1 || toffset2)
16837 {
16838 /* If only one of the offsets is non-constant, the difference cannot
16839 be a constant. */
16840 return false;
16841 }
16842 else
16843 *diff = 0;
16844
16845 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16846 return true;
16847 }
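
/* For example, given "int a[10];" the addresses &a[7] and &a[2] share
   the core &a and differ only by constant offsets, so *DIFF receives
   (7 - 2) * sizeof (int) == 20 and the function returns true; &a[i]
   versus &a[2] fails, since only one offset is non-constant.  */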
16848
16849 /* Simplify the floating point expression EXP when the sign of the
16850 result is not significant. Return NULL_TREE if no simplification
16851 is possible. */
16852
16853 tree
16854 fold_strip_sign_ops (tree exp)
16855 {
16856 tree arg0, arg1;
16857 location_t loc = EXPR_LOCATION (exp);
16858
16859 switch (TREE_CODE (exp))
16860 {
16861 case ABS_EXPR:
16862 case NEGATE_EXPR:
16863 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16864 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16865
16866 case MULT_EXPR:
16867 case RDIV_EXPR:
16868 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16869 return NULL_TREE;
16870 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16871 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16872 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16873 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16874 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16875 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16876 break;
16877
16878 case COMPOUND_EXPR:
16879 arg0 = TREE_OPERAND (exp, 0);
16880 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16881 if (arg1)
16882 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16883 break;
16884
16885 case COND_EXPR:
16886 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16887 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16888 if (arg0 || arg1)
16889 return fold_build3_loc (loc,
16890 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16891 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16892 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16893 break;
16894
16895 case CALL_EXPR:
16896 {
16897 const enum built_in_function fcode = builtin_mathfn_code (exp);
16898 switch (fcode)
16899 {
16900 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16901 /* Strip copysign function call, return the 1st argument. */
16902 arg0 = CALL_EXPR_ARG (exp, 0);
16903 arg1 = CALL_EXPR_ARG (exp, 1);
16904 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16905
16906 default:
16907 /* Strip sign ops from the argument of "odd" math functions. */
16908 if (negate_mathfn_p (fcode))
16909 {
16910 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16911 if (arg0)
16912 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16913 }
16914 break;
16915 }
16916 }
16917 break;
16918
16919 default:
16920 break;
16921 }
16922 return NULL_TREE;
16923 }
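
/* For example, in a context where only the magnitude matters (such as
   the argument of cos, which builtins.c folds using this routine):

     NEGATE_EXPR <x> * ABS_EXPR <y>   =>  x * y
     copysign (x, y)                  =>  x

   The result is valid only in such sign-insensitive contexts; it is
   the caller's responsibility to ensure that.  */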