/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-ssa.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
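
/* Illustration (not part of the original source): the encoding gives
   one bit each to LT, EQ, GT and UNORD, so compound codes are bitwise
   ORs of the primitive ones, e.g.

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)                  == 3
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD) == 13

   and a comparison is inverted simply by XORing with COMPCODE_TRUE,
   e.g. COMPCODE_EQ ^ COMPCODE_TRUE == COMPCODE_NE.  */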

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  wide_int quo;
  wide_int warg1 = arg1;
  wide_int warg2 = arg2;
  signop sgn = TYPE_SIGN (TREE_TYPE (arg1));
  signop sgn2 = TYPE_SIGN (TREE_TYPE (arg2));

  if (sgn != sgn2)
    {
      /* When signedness mismatches, we promote the unsigned value to
         a signed value.  We preserve the value by extending the
         precision by 1 bit, iff the top bit is set.  */
      if (sgn == UNSIGNED)
        {
          if (wi::neg_p (warg1))
            warg1 = wide_int::from (warg1, warg1.get_precision () + 1, sgn);
          sgn = SIGNED;
        }
      else
        {
          if (wi::neg_p (warg2))
            warg2 = wide_int::from (warg2, warg2.get_precision () + 1, sgn2);
        }
    }

  if (wi::multiple_of_p (warg1, warg2, sgn, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
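
/* Usage sketch (hypothetical INTEGER_CST trees, for illustration only):

     tree t12 = build_int_cst (integer_type_node, 12);
     tree t4 = build_int_cst (integer_type_node, 4);
     tree q = div_if_zero_remainder (t12, t4);   -- INTEGER_CST 3
     tree t5 = build_int_cst (integer_type_node, 5);
     tree n = div_if_zero_remainder (t12, t5);   -- NULL_TREE, 12 % 5 != 0
*/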
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
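
/* Sketch of the intended calling pattern (illustrative, not from the
   original file): callers bracket speculative folding with a defer /
   undefer pair and only let the deferred warning out if the folded
   result is actually used.

     fold_defer_overflow_warnings ();
     tree val = fold (expr);
     bool interesting = TREE_CODE (val) == INTEGER_CST;  -- hypothetical test
     fold_undefer_overflow_warnings (interesting, stmt, 0);
*/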
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
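
/* Example (illustrative): sin is odd, so -sin(x) may become sin(-x).
   rint is listed separately because it honors the dynamic rounding
   mode; under upward rounding -rint(0.5) is -1.0 while rint(-0.5) is
   -0.0, so the transformation is only valid without -frounding-math.  */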

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
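
/* For example (illustrative): an INTEGER_CST like 5 is always cheaply
   negatable, but INT_MIN is not unless the type wraps.  A PLUS_EXPR
   qualifies, barring signed zeros and sign-dependent rounding, when
   either operand is itself negatable, anticipating the rewrite
   -(A + B) -> (-B) - A performed below by fold_negate_expr.  */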

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
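
/* Illustrative behaviour (not from the original source): unlike
   fold_negate_expr, negate_expr never fails; where no simplification
   applies it wraps its argument in an explicit NEGATE_EXPR.  So for
   integer operands negate_expr (x - y) folds to y - x, while
   negate_expr applied to a bare variable yields the new tree -x.  */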
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
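
/* Worked example (illustrative): splitting IN = x + 4 with
   CODE == PLUS_EXPR stores 4 in *LITP, leaves *CONP null and returns
   x as the variable part; splitting x - 4 instead stores 4 in
   *MINUS_LITP, because the literal was subtracted.  */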

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
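
/* Illustrative use (hypothetical trees): the parts produced by
   split_tree are stitched back together here, e.g.
   associate_trees (loc, var, lit, PLUS_EXPR, type) rebuilds var + lit
   after converting both operands to TYPE, while NEGATE_EXPR operands
   are turned back into subtractions rather than re-folded.  */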
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int op1, arg2, res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  op1 = arg1;
  arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                         TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | arg2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ arg2;
      break;

    case BIT_AND_EXPR:
      res = op1 & arg2;
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (op1, arg2, sign, GET_MODE_BITSIZE (TYPE_MODE (type)));
      else
        res = wi::lshift (op1, arg2, GET_MODE_BITSIZE (TYPE_MODE (type)));
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (op1, arg2);
      else
        res = wi::lrotate (op1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (op1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (op1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (op1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (op1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      res = wi::div_trunc (op1, arg2, sign, &overflow);
      if (overflow)
        return NULL_TREE;
      break;

    case FLOOR_DIV_EXPR:
      res = wi::div_floor (op1, arg2, sign, &overflow);
      if (overflow)
        return NULL_TREE;
      break;

    case CEIL_DIV_EXPR:
      res = wi::div_ceil (op1, arg2, sign, &overflow);
      if (overflow)
        return NULL_TREE;
      break;

    case ROUND_DIV_EXPR:
      res = wi::div_round (op1, arg2, sign, &overflow);
      if (overflow)
        return NULL_TREE;
      break;

    case TRUNC_MOD_EXPR:
      res = wi::mod_trunc (op1, arg2, sign, &overflow);
      if (overflow)
        return NULL_TREE;
      break;

    case FLOOR_MOD_EXPR:
      res = wi::mod_floor (op1, arg2, sign, &overflow);
      if (overflow)
        return NULL_TREE;
      break;

    case CEIL_MOD_EXPR:
      res = wi::mod_ceil (op1, arg2, sign, &overflow);
      if (overflow)
        return NULL_TREE;
      break;

    case ROUND_MOD_EXPR:
      res = wi::mod_round (op1, arg2, sign, &overflow);
      if (overflow)
        return NULL_TREE;
      break;

    case MIN_EXPR:
      res = wi::min (op1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (op1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
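
/* Minimal sketch (hypothetical INTEGER_CST trees t2 and t3 holding
   2 and 3 in the same type):

     tree five = int_const_binop (PLUS_EXPR, t2, t3);  -- INTEGER_CST 5

   For the additive codes, overflow is recorded via TREE_OVERFLOW on
   the result by force_fit_type; the division and modulus cases
   instead return NULL_TREE when the wide_int operation overflows.  */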

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, compiler emits VEC_RSHIFT_EXPR always,
             for !BYTES_BIG_ENDIAN picks first vector element, but
             for BYTES_BIG_ENDIAN last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
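
/* Illustrative behaviour (not part of the original source):
   const_binop dispatches on the kind of constant, e.g. folding the
   REAL_CSTs 2.5 and 0.5 under PLUS_EXPR into a REAL_CST 3.0 via
   real_arithmetic, and returns NULL_TREE whenever folding would be
   unsafe, such as an RDIV_EXPR by zero when -ftrapping-math is in
   effect.  */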

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
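
/* Worked example (illustrative): for sizetype constants 4 and 12,
   size_diffop_loc computes 4 - 12 by subtracting the other way
   (12 - 4 == 8), converting to ssizetype and negating, yielding -8
   without overflowing the unsigned arithmetic.  */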
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use max_wide_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, max_wide_int (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = max_wide_int (0);
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = max_wide_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = max_wide_int (ut);
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
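
/* Example of the saturating semantics above (illustrative): with a
   32-bit int target type, converting the double 1e30 yields INT_MAX
   and -1e30 yields INT_MIN, both with TREE_OVERFLOW set, while a NaN
   converts to 0, mirroring the Java rules instead of trapping.  */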
1662
1663 /* A subroutine of fold_convert_const handling conversions of a
1664 FIXED_CST to an integer type. */
1665
1666 static tree
1667 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1668 {
1669 tree t;
1670 double_int temp, temp_trunc;
1671 unsigned int mode;
1672
1673 /* Right shift FIXED_CST to temp by fbit. */
1674 temp = TREE_FIXED_CST (arg1).data;
1675 mode = TREE_FIXED_CST (arg1).mode;
1676 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1677 {
1678 temp = temp.rshift (GET_MODE_FBIT (mode),
1679 HOST_BITS_PER_DOUBLE_INT,
1680 SIGNED_FIXED_POINT_MODE_P (mode));
1681
1682 /* Left shift temp to temp_trunc by fbit. */
1683 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1684 HOST_BITS_PER_DOUBLE_INT,
1685 SIGNED_FIXED_POINT_MODE_P (mode));
1686 }
1687 else
1688 {
1689 temp = double_int_zero;
1690 temp_trunc = double_int_zero;
1691 }
1692
1693 /* If FIXED_CST is negative, we need to round the value toward 0.
1694 By checking if the fractional bits are not zero to add 1 to temp. */
1695 if (SIGNED_FIXED_POINT_MODE_P (mode)
1696 && temp_trunc.is_negative ()
1697 && TREE_FIXED_CST (arg1).data != temp_trunc)
1698 temp += double_int_one;
1699
1700 /* Given a fixed-point constant, make new constant with new type,
1701 appropriately sign-extended or truncated. */
1702 t = force_fit_type (type, temp, -1,
1703 (temp.is_negative ()
1704 && (TYPE_UNSIGNED (type)
1705 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1706 | TREE_OVERFLOW (arg1));
1707
1708 return t;
1709 }
1710
1711 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1712 to another floating point type. */
1713
1714 static tree
1715 fold_convert_const_real_from_real (tree type, const_tree arg1)
1716 {
1717 REAL_VALUE_TYPE value;
1718 tree t;
1719
1720 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1721 t = build_real (type, value);
1722
1723 /* If converting an infinity or NAN to a representation that doesn't
1724 have one, set the overflow bit so that we can produce some kind of
1725 error message at the appropriate point if necessary. It's not the
1726 most user-friendly message, but it's better than nothing. */
1727 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1728 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1729 TREE_OVERFLOW (t) = 1;
1730 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1731 && !MODE_HAS_NANS (TYPE_MODE (type)))
1732 TREE_OVERFLOW (t) = 1;
1733 /* Regular overflow, conversion produced an infinity in a mode that
1734 can't represent them. */
1735 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1736 && REAL_VALUE_ISINF (value)
1737 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1738 TREE_OVERFLOW (t) = 1;
1739 else
1740 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1741 return t;
1742 }
1743
1744 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1745 to a floating point type. */
1746
1747 static tree
1748 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1749 {
1750 REAL_VALUE_TYPE value;
1751 tree t;
1752
1753 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1754 t = build_real (type, value);
1755
1756 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1757 return t;
1758 }
1759
1760 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1761 to another fixed-point type. */
1762
1763 static tree
1764 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1765 {
1766 FIXED_VALUE_TYPE value;
1767 tree t;
1768 bool overflow_p;
1769
1770 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1771 TYPE_SATURATING (type));
1772 t = build_fixed (type, value);
1773
1774 /* Propagate overflow flags. */
1775 if (overflow_p | TREE_OVERFLOW (arg1))
1776 TREE_OVERFLOW (t) = 1;
1777 return t;
1778 }
1779
1780 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1781 to a fixed-point type. */
1782
1783 static tree
1784 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1785 {
1786 FIXED_VALUE_TYPE value;
1787 tree t;
1788 bool overflow_p;
1789 double_int di;
1790
1791 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
1792
1793 di.low = TREE_INT_CST_ELT (arg1, 0);
1794 if (TREE_INT_CST_NUNITS (arg1) == 1)
1795 di.high = (HOST_WIDE_INT)di.low < 0 ? (HOST_WIDE_INT)-1 : 0;
1796 else
1797 di.high = TREE_INT_CST_ELT (arg1, 1);
1798
1799 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1800 di,
1801 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1802 TYPE_SATURATING (type));
1803 t = build_fixed (type, value);
1804
1805 /* Propagate overflow flags. */
1806 if (overflow_p | TREE_OVERFLOW (arg1))
1807 TREE_OVERFLOW (t) = 1;
1808 return t;
1809 }
1810
1811 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1812 to a fixed-point type. */
1813
1814 static tree
1815 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1816 {
1817 FIXED_VALUE_TYPE value;
1818 tree t;
1819 bool overflow_p;
1820
1821 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1822 &TREE_REAL_CST (arg1),
1823 TYPE_SATURATING (type));
1824 t = build_fixed (type, value);
1825
1826 /* Propagate overflow flags. */
1827 if (overflow_p | TREE_OVERFLOW (arg1))
1828 TREE_OVERFLOW (t) = 1;
1829 return t;
1830 }
1831
1832 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1833 type TYPE. If no simplification can be done return NULL_TREE. */
1834
1835 static tree
1836 fold_convert_const (enum tree_code code, tree type, tree arg1)
1837 {
1838 if (TREE_TYPE (arg1) == type)
1839 return arg1;
1840
1841 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1842 || TREE_CODE (type) == OFFSET_TYPE)
1843 {
1844 if (TREE_CODE (arg1) == INTEGER_CST)
1845 return fold_convert_const_int_from_int (type, arg1);
1846 else if (TREE_CODE (arg1) == REAL_CST)
1847 return fold_convert_const_int_from_real (code, type, arg1);
1848 else if (TREE_CODE (arg1) == FIXED_CST)
1849 return fold_convert_const_int_from_fixed (type, arg1);
1850 }
1851 else if (TREE_CODE (type) == REAL_TYPE)
1852 {
1853 if (TREE_CODE (arg1) == INTEGER_CST)
1854 return build_real_from_int_cst (type, arg1);
1855 else if (TREE_CODE (arg1) == REAL_CST)
1856 return fold_convert_const_real_from_real (type, arg1);
1857 else if (TREE_CODE (arg1) == FIXED_CST)
1858 return fold_convert_const_real_from_fixed (type, arg1);
1859 }
1860 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1861 {
1862 if (TREE_CODE (arg1) == FIXED_CST)
1863 return fold_convert_const_fixed_from_fixed (type, arg1);
1864 else if (TREE_CODE (arg1) == INTEGER_CST)
1865 return fold_convert_const_fixed_from_int (type, arg1);
1866 else if (TREE_CODE (arg1) == REAL_CST)
1867 return fold_convert_const_fixed_from_real (type, arg1);
1868 }
1869 return NULL_TREE;
1870 }
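
/* For example, converting the INTEGER_CST 3 to double_type_node under
   FLOAT_EXPR yields the REAL_CST 3.0 via build_real_from_int_cst, while
   a non-constant ARG1 fails every TREE_CODE test above and gives
   NULL_TREE.  */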
1871
1872 /* Construct a vector of zero elements of vector type TYPE. */
1873
1874 static tree
1875 build_zero_vector (tree type)
1876 {
1877 tree t;
1878
1879 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1880 return build_vector_from_val (type, t);
1881 }
1882
1883 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1884
1885 bool
1886 fold_convertible_p (const_tree type, const_tree arg)
1887 {
1888 tree orig = TREE_TYPE (arg);
1889
1890 if (type == orig)
1891 return true;
1892
1893 if (TREE_CODE (arg) == ERROR_MARK
1894 || TREE_CODE (type) == ERROR_MARK
1895 || TREE_CODE (orig) == ERROR_MARK)
1896 return false;
1897
1898 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1899 return true;
1900
1901 switch (TREE_CODE (type))
1902 {
1903 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1904 case POINTER_TYPE: case REFERENCE_TYPE:
1905 case OFFSET_TYPE:
1906 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1907 || TREE_CODE (orig) == OFFSET_TYPE)
1908 return true;
1909 return (TREE_CODE (orig) == VECTOR_TYPE
1910 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1911
1912 case REAL_TYPE:
1913 case FIXED_POINT_TYPE:
1914 case COMPLEX_TYPE:
1915 case VECTOR_TYPE:
1916 case VOID_TYPE:
1917 return TREE_CODE (type) == TREE_CODE (orig);
1918
1919 default:
1920 return false;
1921 }
1922 }
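
/* Illustrative examples: an int is fold-convertible to an enumerated or
   pointer type, since a plain NOP_EXPR suffices, but not to double; an
   int-to-float conversion needs FLOAT_EXPR, not NOP_EXPR.  */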
1923
1924 /* Convert expression ARG to type TYPE. Used by the middle-end for
1925 simple conversions in preference to calling the front-end's convert. */
1926
1927 tree
1928 fold_convert_loc (location_t loc, tree type, tree arg)
1929 {
1930 tree orig = TREE_TYPE (arg);
1931 tree tem;
1932
1933 if (type == orig)
1934 return arg;
1935
1936 if (TREE_CODE (arg) == ERROR_MARK
1937 || TREE_CODE (type) == ERROR_MARK
1938 || TREE_CODE (orig) == ERROR_MARK)
1939 return error_mark_node;
1940
1941 switch (TREE_CODE (type))
1942 {
1943 case POINTER_TYPE:
1944 case REFERENCE_TYPE:
1945 /* Handle conversions between pointers to different address spaces. */
1946 if (POINTER_TYPE_P (orig)
1947 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1948 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1949 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1950 /* fall through */
1951
1952 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1953 case OFFSET_TYPE:
1954 if (TREE_CODE (arg) == INTEGER_CST)
1955 {
1956 tem = fold_convert_const (NOP_EXPR, type, arg);
1957 if (tem != NULL_TREE)
1958 return tem;
1959 }
1960 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1961 || TREE_CODE (orig) == OFFSET_TYPE)
1962 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1963 if (TREE_CODE (orig) == COMPLEX_TYPE)
1964 return fold_convert_loc (loc, type,
1965 fold_build1_loc (loc, REALPART_EXPR,
1966 TREE_TYPE (orig), arg));
1967 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1968 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1969 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1970
1971 case REAL_TYPE:
1972 if (TREE_CODE (arg) == INTEGER_CST)
1973 {
1974 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1975 if (tem != NULL_TREE)
1976 return tem;
1977 }
1978 else if (TREE_CODE (arg) == REAL_CST)
1979 {
1980 tem = fold_convert_const (NOP_EXPR, type, arg);
1981 if (tem != NULL_TREE)
1982 return tem;
1983 }
1984 else if (TREE_CODE (arg) == FIXED_CST)
1985 {
1986 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1987 if (tem != NULL_TREE)
1988 return tem;
1989 }
1990
1991 switch (TREE_CODE (orig))
1992 {
1993 case INTEGER_TYPE:
1994 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1995 case POINTER_TYPE: case REFERENCE_TYPE:
1996 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1997
1998 case REAL_TYPE:
1999 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2000
2001 case FIXED_POINT_TYPE:
2002 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2003
2004 case COMPLEX_TYPE:
2005 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2006 return fold_convert_loc (loc, type, tem);
2007
2008 default:
2009 gcc_unreachable ();
2010 }
2011
2012 case FIXED_POINT_TYPE:
2013 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2014 || TREE_CODE (arg) == REAL_CST)
2015 {
2016 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2017 if (tem != NULL_TREE)
2018 goto fold_convert_exit;
2019 }
2020
2021 switch (TREE_CODE (orig))
2022 {
2023 case FIXED_POINT_TYPE:
2024 case INTEGER_TYPE:
2025 case ENUMERAL_TYPE:
2026 case BOOLEAN_TYPE:
2027 case REAL_TYPE:
2028 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2029
2030 case COMPLEX_TYPE:
2031 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2032 return fold_convert_loc (loc, type, tem);
2033
2034 default:
2035 gcc_unreachable ();
2036 }
2037
2038 case COMPLEX_TYPE:
2039 switch (TREE_CODE (orig))
2040 {
2041 case INTEGER_TYPE:
2042 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2043 case POINTER_TYPE: case REFERENCE_TYPE:
2044 case REAL_TYPE:
2045 case FIXED_POINT_TYPE:
2046 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2047 fold_convert_loc (loc, TREE_TYPE (type), arg),
2048 fold_convert_loc (loc, TREE_TYPE (type),
2049 integer_zero_node));
2050 case COMPLEX_TYPE:
2051 {
2052 tree rpart, ipart;
2053
2054 if (TREE_CODE (arg) == COMPLEX_EXPR)
2055 {
2056 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2057 TREE_OPERAND (arg, 0));
2058 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2059 TREE_OPERAND (arg, 1));
2060 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2061 }
2062
2063 arg = save_expr (arg);
2064 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2065 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2066 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2067 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2068 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2069 }
2070
2071 default:
2072 gcc_unreachable ();
2073 }
2074
2075 case VECTOR_TYPE:
2076 if (integer_zerop (arg))
2077 return build_zero_vector (type);
2078 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2079 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2080 || TREE_CODE (orig) == VECTOR_TYPE);
2081 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2082
2083 case VOID_TYPE:
2084 tem = fold_ignored_result (arg);
2085 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2086
2087 default:
2088 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2089 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2090 gcc_unreachable ();
2091 }
2092 fold_convert_exit:
2093 protected_set_expr_location_unshare (tem, loc);
2094 return tem;
2095 }
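
/* For example, converting a COMPLEX_TYPE value to REAL_TYPE above takes
   just its real part, while a scalar converted to COMPLEX_TYPE gains a
   zero imaginary part.  */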
2096 \f
2097 /* Return false if expr can be assumed not to be an lvalue, true
2098 otherwise. */
2099
2100 static bool
2101 maybe_lvalue_p (const_tree x)
2102 {
2103 /* We only need to wrap lvalue tree codes. */
2104 switch (TREE_CODE (x))
2105 {
2106 case VAR_DECL:
2107 case PARM_DECL:
2108 case RESULT_DECL:
2109 case LABEL_DECL:
2110 case FUNCTION_DECL:
2111 case SSA_NAME:
2112
2113 case COMPONENT_REF:
2114 case MEM_REF:
2115 case INDIRECT_REF:
2116 case ARRAY_REF:
2117 case ARRAY_RANGE_REF:
2118 case BIT_FIELD_REF:
2119 case OBJ_TYPE_REF:
2120
2121 case REALPART_EXPR:
2122 case IMAGPART_EXPR:
2123 case PREINCREMENT_EXPR:
2124 case PREDECREMENT_EXPR:
2125 case SAVE_EXPR:
2126 case TRY_CATCH_EXPR:
2127 case WITH_CLEANUP_EXPR:
2128 case COMPOUND_EXPR:
2129 case MODIFY_EXPR:
2130 case TARGET_EXPR:
2131 case COND_EXPR:
2132 case BIND_EXPR:
2133 break;
2134
2135 default:
2136 /* Assume the worst for front-end tree codes. */
2137 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2138 break;
2139 return false;
2140 }
2141
2142 return true;
2143 }
2144
2145 /* Return an expr equal to X but certainly not valid as an lvalue. */
2146
2147 tree
2148 non_lvalue_loc (location_t loc, tree x)
2149 {
2150 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2151 us. */
2152 if (in_gimple_form)
2153 return x;
2154
2155 if (! maybe_lvalue_p (x))
2156 return x;
2157 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2158 }
2159
2160 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2161 Zero means allow extended lvalues. */
2162
2163 int pedantic_lvalues;
2164
2165 /* When pedantic, return an expr equal to X but certainly not valid as a
2166 pedantic lvalue. Otherwise, return X. */
2167
2168 static tree
2169 pedantic_non_lvalue_loc (location_t loc, tree x)
2170 {
2171 if (pedantic_lvalues)
2172 return non_lvalue_loc (loc, x);
2173
2174 return protected_set_expr_location_unshare (x, loc);
2175 }
2176 \f
2177 /* Given a tree comparison code, return the code that is the logical inverse.
2178 It is generally not safe to do this for floating-point comparisons, except
2179 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2180 ERROR_MARK in this case. */
2181
2182 enum tree_code
2183 invert_tree_comparison (enum tree_code code, bool honor_nans)
2184 {
2185 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2186 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2187 return ERROR_MARK;
2188
2189 switch (code)
2190 {
2191 case EQ_EXPR:
2192 return NE_EXPR;
2193 case NE_EXPR:
2194 return EQ_EXPR;
2195 case GT_EXPR:
2196 return honor_nans ? UNLE_EXPR : LE_EXPR;
2197 case GE_EXPR:
2198 return honor_nans ? UNLT_EXPR : LT_EXPR;
2199 case LT_EXPR:
2200 return honor_nans ? UNGE_EXPR : GE_EXPR;
2201 case LE_EXPR:
2202 return honor_nans ? UNGT_EXPR : GT_EXPR;
2203 case LTGT_EXPR:
2204 return UNEQ_EXPR;
2205 case UNEQ_EXPR:
2206 return LTGT_EXPR;
2207 case UNGT_EXPR:
2208 return LE_EXPR;
2209 case UNGE_EXPR:
2210 return LT_EXPR;
2211 case UNLT_EXPR:
2212 return GE_EXPR;
2213 case UNLE_EXPR:
2214 return GT_EXPR;
2215 case ORDERED_EXPR:
2216 return UNORDERED_EXPR;
2217 case UNORDERED_EXPR:
2218 return ORDERED_EXPR;
2219 default:
2220 gcc_unreachable ();
2221 }
2222 }
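
/* For example, with NaNs honored the inverse of x < y is x UNGE y rather
   than x >= y, since the negation must also hold when either operand is
   a NaN.  With trapping math in effect as well, every code other than
   EQ, NE, ORDERED and UNORDERED is refused (ERROR_MARK), since swapping
   ordered and unordered forms changes which operand values trap.  */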
2223
2224 /* Similar, but return the comparison that results if the operands are
2225 swapped. This is safe for floating-point. */
2226
2227 enum tree_code
2228 swap_tree_comparison (enum tree_code code)
2229 {
2230 switch (code)
2231 {
2232 case EQ_EXPR:
2233 case NE_EXPR:
2234 case ORDERED_EXPR:
2235 case UNORDERED_EXPR:
2236 case LTGT_EXPR:
2237 case UNEQ_EXPR:
2238 return code;
2239 case GT_EXPR:
2240 return LT_EXPR;
2241 case GE_EXPR:
2242 return LE_EXPR;
2243 case LT_EXPR:
2244 return GT_EXPR;
2245 case LE_EXPR:
2246 return GE_EXPR;
2247 case UNGT_EXPR:
2248 return UNLT_EXPR;
2249 case UNGE_EXPR:
2250 return UNLE_EXPR;
2251 case UNLT_EXPR:
2252 return UNGT_EXPR;
2253 case UNLE_EXPR:
2254 return UNGE_EXPR;
2255 default:
2256 gcc_unreachable ();
2257 }
2258 }
2259
2260
2261 /* Convert a comparison tree code from an enum tree_code representation
2262 into a compcode bit-based encoding. This function is the inverse of
2263 compcode_to_comparison. */
2264
2265 static enum comparison_code
2266 comparison_to_compcode (enum tree_code code)
2267 {
2268 switch (code)
2269 {
2270 case LT_EXPR:
2271 return COMPCODE_LT;
2272 case EQ_EXPR:
2273 return COMPCODE_EQ;
2274 case LE_EXPR:
2275 return COMPCODE_LE;
2276 case GT_EXPR:
2277 return COMPCODE_GT;
2278 case NE_EXPR:
2279 return COMPCODE_NE;
2280 case GE_EXPR:
2281 return COMPCODE_GE;
2282 case ORDERED_EXPR:
2283 return COMPCODE_ORD;
2284 case UNORDERED_EXPR:
2285 return COMPCODE_UNORD;
2286 case UNLT_EXPR:
2287 return COMPCODE_UNLT;
2288 case UNEQ_EXPR:
2289 return COMPCODE_UNEQ;
2290 case UNLE_EXPR:
2291 return COMPCODE_UNLE;
2292 case UNGT_EXPR:
2293 return COMPCODE_UNGT;
2294 case LTGT_EXPR:
2295 return COMPCODE_LTGT;
2296 case UNGE_EXPR:
2297 return COMPCODE_UNGE;
2298 default:
2299 gcc_unreachable ();
2300 }
2301 }
2302
2303 /* Convert a compcode bit-based encoding of a comparison operator back
2304 to GCC's enum tree_code representation. This function is the
2305 inverse of comparison_to_compcode. */
2306
2307 static enum tree_code
2308 compcode_to_comparison (enum comparison_code code)
2309 {
2310 switch (code)
2311 {
2312 case COMPCODE_LT:
2313 return LT_EXPR;
2314 case COMPCODE_EQ:
2315 return EQ_EXPR;
2316 case COMPCODE_LE:
2317 return LE_EXPR;
2318 case COMPCODE_GT:
2319 return GT_EXPR;
2320 case COMPCODE_NE:
2321 return NE_EXPR;
2322 case COMPCODE_GE:
2323 return GE_EXPR;
2324 case COMPCODE_ORD:
2325 return ORDERED_EXPR;
2326 case COMPCODE_UNORD:
2327 return UNORDERED_EXPR;
2328 case COMPCODE_UNLT:
2329 return UNLT_EXPR;
2330 case COMPCODE_UNEQ:
2331 return UNEQ_EXPR;
2332 case COMPCODE_UNLE:
2333 return UNLE_EXPR;
2334 case COMPCODE_UNGT:
2335 return UNGT_EXPR;
2336 case COMPCODE_LTGT:
2337 return LTGT_EXPR;
2338 case COMPCODE_UNGE:
2339 return UNGE_EXPR;
2340 default:
2341 gcc_unreachable ();
2342 }
2343 }
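
/* Worked example of the bit encoding: COMPCODE_LE is
   COMPCODE_LT | COMPCODE_EQ (3 == 1 | 2) and COMPCODE_GE is
   COMPCODE_EQ | COMPCODE_GT (6 == 2 | 4), so ANDing the codes for
   x <= y and x >= y gives 3 & 6 == 2, i.e. COMPCODE_EQ.  This is how
   combine_comparisons below folds (x <= y) && (x >= y) into x == y.  */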
2344
2345 /* Return a tree for the comparison which is the combination of
2346 doing the AND or OR (depending on CODE) of the two operations LCODE
2347 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2348 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2349 if this makes the transformation invalid. */
2350
2351 tree
2352 combine_comparisons (location_t loc,
2353 enum tree_code code, enum tree_code lcode,
2354 enum tree_code rcode, tree truth_type,
2355 tree ll_arg, tree lr_arg)
2356 {
2357 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2358 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2359 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2360 int compcode;
2361
2362 switch (code)
2363 {
2364 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2365 compcode = lcompcode & rcompcode;
2366 break;
2367
2368 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2369 compcode = lcompcode | rcompcode;
2370 break;
2371
2372 default:
2373 return NULL_TREE;
2374 }
2375
2376 if (!honor_nans)
2377 {
2378 /* Eliminate unordered comparisons, as well as LTGT and ORD
2379 which are not used unless the mode has NaNs. */
2380 compcode &= ~COMPCODE_UNORD;
2381 if (compcode == COMPCODE_LTGT)
2382 compcode = COMPCODE_NE;
2383 else if (compcode == COMPCODE_ORD)
2384 compcode = COMPCODE_TRUE;
2385 }
2386 else if (flag_trapping_math)
2387 {
2388 /* Check that the original operation and the optimized ones will trap
2389 under the same condition. */
2390 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2391 && (lcompcode != COMPCODE_EQ)
2392 && (lcompcode != COMPCODE_ORD);
2393 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2394 && (rcompcode != COMPCODE_EQ)
2395 && (rcompcode != COMPCODE_ORD);
2396 bool trap = (compcode & COMPCODE_UNORD) == 0
2397 && (compcode != COMPCODE_EQ)
2398 && (compcode != COMPCODE_ORD);
2399
2400 /* In a short-circuited boolean expression the LHS might be
2401 such that the RHS, if evaluated, will never trap. For
2402 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2403 if neither x nor y is NaN. (This is a mixed blessing: for
2404 example, the expression above will never trap, hence
2405 optimizing it to x < y would be invalid). */
2406 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2407 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2408 rtrap = false;
2409
2410 /* If the comparison was short-circuited, and only the RHS
2411 trapped, we may now generate a spurious trap. */
2412 if (rtrap && !ltrap
2413 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2414 return NULL_TREE;
2415
2416 /* If we changed the conditions that cause a trap, we lose. */
2417 if ((ltrap || rtrap) != trap)
2418 return NULL_TREE;
2419 }
2420
2421 if (compcode == COMPCODE_TRUE)
2422 return constant_boolean_node (true, truth_type);
2423 else if (compcode == COMPCODE_FALSE)
2424 return constant_boolean_node (false, truth_type);
2425 else
2426 {
2427 enum tree_code tcode;
2428
2429 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2430 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2431 }
2432 }
2433 \f
2434 /* Return nonzero if two operands (typically of the same tree node)
2435 are necessarily equal. If either argument has side-effects this
2436 function returns zero. FLAGS modifies behavior as follows:
2437
2438 If OEP_ONLY_CONST is set, only return nonzero for constants.
2439 This function tests whether the operands are indistinguishable;
2440 it does not test whether they are equal using C's == operation.
2441 The distinction is important for IEEE floating point, because
2442 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2443 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2444
2445 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2446 even though it may hold multiple values during a function.
2447 This is because a GCC tree node guarantees that nothing else is
2448 executed between the evaluation of its "operands" (which may often
2449 be evaluated in arbitrary order). Hence if the operands themselves
2450 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2451 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2452 unset means assuming isochronic (or instantaneous) tree equivalence.
2453 Unless comparing arbitrary expression trees, such as from different
2454 statements, this flag can usually be left unset.
2455
2456 If OEP_PURE_SAME is set, then pure functions with identical arguments
2457 are considered the same. It is used when the caller has other ways
2458 to ensure that global memory is unchanged in between. */
2459
2460 int
2461 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2462 {
2463 /* If either is ERROR_MARK, they aren't equal. */
2464 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2465 || TREE_TYPE (arg0) == error_mark_node
2466 || TREE_TYPE (arg1) == error_mark_node)
2467 return 0;
2468
2469 /* Similar, if either does not have a type (like a released SSA name),
2470 they aren't equal. */
2471 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2472 return 0;
2473
2474 /* Check equality of integer constants before bailing out due to
2475 precision differences. */
2476 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2477 return tree_int_cst_equal (arg0, arg1);
2478
2479 /* If both types don't have the same signedness, then we can't consider
2480 them equal. We must check this before the STRIP_NOPS calls
2481 because they may change the signedness of the arguments. As pointers
2482 strictly don't have a signedness, require either two pointers or
2483 two non-pointers as well. */
2484 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2485 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2486 return 0;
2487
2488 /* We cannot consider pointers to different address space equal. */
2489 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2490 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2491 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2492 return 0;
2493
2494 /* If both types don't have the same precision, then it is not safe
2495 to strip NOPs. */
2496 if (element_precision (TREE_TYPE (arg0))
2497 != element_precision (TREE_TYPE (arg1)))
2498 return 0;
2499
2500 STRIP_NOPS (arg0);
2501 STRIP_NOPS (arg1);
2502
2503 /* In case both args are comparisons but with different comparison
2504 code, try to swap the comparison operands of one arg to produce
2505 a match and compare that variant. */
2506 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2507 && COMPARISON_CLASS_P (arg0)
2508 && COMPARISON_CLASS_P (arg1))
2509 {
2510 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2511
2512 if (TREE_CODE (arg0) == swap_code)
2513 return operand_equal_p (TREE_OPERAND (arg0, 0),
2514 TREE_OPERAND (arg1, 1), flags)
2515 && operand_equal_p (TREE_OPERAND (arg0, 1),
2516 TREE_OPERAND (arg1, 0), flags);
2517 }
2518
2519 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2520 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2521 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2522 return 0;
2523
2524 /* This is needed for conversions and for COMPONENT_REF.
2525 Might as well play it safe and always test this. */
2526 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2527 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2528 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2529 return 0;
2530
2531 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2532 We don't care about side effects in that case because the SAVE_EXPR
2533 takes care of that for us. In all other cases, two expressions are
2534 equal if they have no side effects. If we have two identical
2535 expressions with side effects that should be treated the same due
2536 to the only side effects being identical SAVE_EXPR's, that will
2537 be detected in the recursive calls below.
2538 If we are taking an invariant address of two identical objects
2539 they are necessarily equal as well. */
2540 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2541 && (TREE_CODE (arg0) == SAVE_EXPR
2542 || (flags & OEP_CONSTANT_ADDRESS_OF)
2543 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2544 return 1;
2545
2546 /* Next handle constant cases, those for which we can return 1 even
2547 if ONLY_CONST is set. */
2548 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2549 switch (TREE_CODE (arg0))
2550 {
2551 case INTEGER_CST:
2552 return tree_int_cst_equal (arg0, arg1);
2553
2554 case FIXED_CST:
2555 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2556 TREE_FIXED_CST (arg1));
2557
2558 case REAL_CST:
2559 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2560 TREE_REAL_CST (arg1)))
2561 return 1;
2562
2563
2564 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2565 {
2566 /* If we do not distinguish between signed and unsigned zero,
2567 consider them equal. */
2568 if (real_zerop (arg0) && real_zerop (arg1))
2569 return 1;
2570 }
2571 return 0;
2572
2573 case VECTOR_CST:
2574 {
2575 unsigned i;
2576
2577 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2578 return 0;
2579
2580 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2581 {
2582 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2583 VECTOR_CST_ELT (arg1, i), flags))
2584 return 0;
2585 }
2586 return 1;
2587 }
2588
2589 case COMPLEX_CST:
2590 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2591 flags)
2592 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2593 flags));
2594
2595 case STRING_CST:
2596 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2597 && ! memcmp (TREE_STRING_POINTER (arg0),
2598 TREE_STRING_POINTER (arg1),
2599 TREE_STRING_LENGTH (arg0)));
2600
2601 case ADDR_EXPR:
2602 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2603 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2604 ? OEP_CONSTANT_ADDRESS_OF : 0);
2605 default:
2606 break;
2607 }
2608
2609 if (flags & OEP_ONLY_CONST)
2610 return 0;
2611
2612 /* Define macros to test an operand from arg0 and arg1 for equality and a
2613 variant that allows null and views null as being different from any
2614 non-null value. In the latter case, if either is null, then both
2615 must be; otherwise, do the normal comparison. */
2616 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2617 TREE_OPERAND (arg1, N), flags)
2618
2619 #define OP_SAME_WITH_NULL(N) \
2620 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2621 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2622
2623 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2624 {
2625 case tcc_unary:
2626 /* Two conversions are equal only if signedness and modes match. */
2627 switch (TREE_CODE (arg0))
2628 {
2629 CASE_CONVERT:
2630 case FIX_TRUNC_EXPR:
2631 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2632 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2633 return 0;
2634 break;
2635 default:
2636 break;
2637 }
2638
2639 return OP_SAME (0);
2640
2641
2642 case tcc_comparison:
2643 case tcc_binary:
2644 if (OP_SAME (0) && OP_SAME (1))
2645 return 1;
2646
2647 /* For commutative ops, allow the other order. */
2648 return (commutative_tree_code (TREE_CODE (arg0))
2649 && operand_equal_p (TREE_OPERAND (arg0, 0),
2650 TREE_OPERAND (arg1, 1), flags)
2651 && operand_equal_p (TREE_OPERAND (arg0, 1),
2652 TREE_OPERAND (arg1, 0), flags));
2653
2654 case tcc_reference:
2655 /* If either of the pointer (or reference) expressions we are
2656 dereferencing contain a side effect, these cannot be equal,
2657 but their addresses can be. */
2658 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2659 && (TREE_SIDE_EFFECTS (arg0)
2660 || TREE_SIDE_EFFECTS (arg1)))
2661 return 0;
2662
2663 switch (TREE_CODE (arg0))
2664 {
2665 case INDIRECT_REF:
2666 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2667 return OP_SAME (0);
2668
2669 case REALPART_EXPR:
2670 case IMAGPART_EXPR:
2671 return OP_SAME (0);
2672
2673 case TARGET_MEM_REF:
2674 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2675 /* Require equal extra operands and then fall through to MEM_REF
2676 handling of the two common operands. */
2677 if (!OP_SAME_WITH_NULL (2)
2678 || !OP_SAME_WITH_NULL (3)
2679 || !OP_SAME_WITH_NULL (4))
2680 return 0;
2681 /* Fallthru. */
2682 case MEM_REF:
2683 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2684 /* Require equal access sizes, and similar pointer types.
2685 We can have incomplete types for array references of
2686 variable-sized arrays from the Fortran frontend
2687 though. Also verify the types are compatible. */
2688 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2689 || (TYPE_SIZE (TREE_TYPE (arg0))
2690 && TYPE_SIZE (TREE_TYPE (arg1))
2691 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2692 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2693 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2694 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2695 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2696 && OP_SAME (0) && OP_SAME (1));
2697
2698 case ARRAY_REF:
2699 case ARRAY_RANGE_REF:
2700 /* Operands 2 and 3 may be null.
2701 Compare the array index by value first if it is constant, as we
2702 may have different types but the same value here. */
2703 if (!OP_SAME (0))
2704 return 0;
2705 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2706 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2707 TREE_OPERAND (arg1, 1))
2708 || OP_SAME (1))
2709 && OP_SAME_WITH_NULL (2)
2710 && OP_SAME_WITH_NULL (3));
2711
2712 case COMPONENT_REF:
2713 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2714 may be NULL when we're called to compare MEM_EXPRs. */
2715 if (!OP_SAME_WITH_NULL (0))
2716 return 0;
2717 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2718 return OP_SAME (1) && OP_SAME_WITH_NULL (2);
2719
2720 case BIT_FIELD_REF:
2721 if (!OP_SAME (0))
2722 return 0;
2723 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2724 return OP_SAME (1) && OP_SAME (2);
2725
2726 default:
2727 return 0;
2728 }
2729
2730 case tcc_expression:
2731 switch (TREE_CODE (arg0))
2732 {
2733 case ADDR_EXPR:
2734 case TRUTH_NOT_EXPR:
2735 return OP_SAME (0);
2736
2737 case TRUTH_ANDIF_EXPR:
2738 case TRUTH_ORIF_EXPR:
2739 return OP_SAME (0) && OP_SAME (1);
2740
2741 case FMA_EXPR:
2742 case WIDEN_MULT_PLUS_EXPR:
2743 case WIDEN_MULT_MINUS_EXPR:
2744 if (!OP_SAME (2))
2745 return 0;
2746 /* The multiplication operands are commutative. */
2747 /* FALLTHRU */
2748
2749 case TRUTH_AND_EXPR:
2750 case TRUTH_OR_EXPR:
2751 case TRUTH_XOR_EXPR:
2752 if (OP_SAME (0) && OP_SAME (1))
2753 return 1;
2754
2755 /* Otherwise take into account this is a commutative operation. */
2756 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2757 TREE_OPERAND (arg1, 1), flags)
2758 && operand_equal_p (TREE_OPERAND (arg0, 1),
2759 TREE_OPERAND (arg1, 0), flags));
2760
2761 case COND_EXPR:
2762 case VEC_COND_EXPR:
2763 case DOT_PROD_EXPR:
2764 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2765
2766 default:
2767 return 0;
2768 }
2769
2770 case tcc_vl_exp:
2771 switch (TREE_CODE (arg0))
2772 {
2773 case CALL_EXPR:
2774 /* If the CALL_EXPRs call different functions, then they
2775 clearly cannot be equal. */
2776 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2777 flags))
2778 return 0;
2779
2780 {
2781 unsigned int cef = call_expr_flags (arg0);
2782 if (flags & OEP_PURE_SAME)
2783 cef &= ECF_CONST | ECF_PURE;
2784 else
2785 cef &= ECF_CONST;
2786 if (!cef)
2787 return 0;
2788 }
2789
2790 /* Now see if all the arguments are the same. */
2791 {
2792 const_call_expr_arg_iterator iter0, iter1;
2793 const_tree a0, a1;
2794 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2795 a1 = first_const_call_expr_arg (arg1, &iter1);
2796 a0 && a1;
2797 a0 = next_const_call_expr_arg (&iter0),
2798 a1 = next_const_call_expr_arg (&iter1))
2799 if (! operand_equal_p (a0, a1, flags))
2800 return 0;
2801
2802 /* If we get here and both argument lists are exhausted
2803 then the CALL_EXPRs are equal. */
2804 return ! (a0 || a1);
2805 }
2806 default:
2807 return 0;
2808 }
2809
2810 case tcc_declaration:
2811 /* Consider __builtin_sqrt equal to sqrt. */
2812 return (TREE_CODE (arg0) == FUNCTION_DECL
2813 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2814 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2815 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2816
2817 default:
2818 return 0;
2819 }
2820
2821 #undef OP_SAME
2822 #undef OP_SAME_WITH_NULL
2823 }
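
/* For instance, operand_equal_p treats a + b and b + a as equal (the
   tcc_binary case retries commutative codes with swapped operands) but
   not a - b and b - a; and two identical calls f () are never equal
   unless f is ECF_CONST, or ECF_PURE with OEP_PURE_SAME set.  */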
2824 \f
2825 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2826 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2827
2828 When in doubt, return 0. */
2829
2830 static int
2831 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2832 {
2833 int unsignedp1, unsignedpo;
2834 tree primarg0, primarg1, primother;
2835 unsigned int correct_width;
2836
2837 if (operand_equal_p (arg0, arg1, 0))
2838 return 1;
2839
2840 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2841 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2842 return 0;
2843
2844 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2845 and see if the inner values are the same. This removes any
2846 signedness comparison, which doesn't matter here. */
2847 primarg0 = arg0, primarg1 = arg1;
2848 STRIP_NOPS (primarg0);
2849 STRIP_NOPS (primarg1);
2850 if (operand_equal_p (primarg0, primarg1, 0))
2851 return 1;
2852
2853 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2854 actual comparison operand, ARG0.
2855
2856 First throw away any conversions to wider types
2857 already present in the operands. */
2858
2859 primarg1 = get_narrower (arg1, &unsignedp1);
2860 primother = get_narrower (other, &unsignedpo);
2861
2862 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2863 if (unsignedp1 == unsignedpo
2864 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2865 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2866 {
2867 tree type = TREE_TYPE (arg0);
2868
2869 /* Make sure shorter operand is extended the right way
2870 to match the longer operand. */
2871 primarg1 = fold_convert (signed_or_unsigned_type_for
2872 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2873
2874 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2875 return 1;
2876 }
2877
2878 return 0;
2879 }
2880 \f
2881 /* See if ARG is an expression that is either a comparison or is performing
2882 arithmetic on comparisons. The comparisons must only be comparing
2883 two different values, which will be stored in *CVAL1 and *CVAL2; if
2884 they are nonzero it means that some operands have already been found.
2885 No variables may be used anywhere else in the expression except in the
2886 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2887 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2888
2889 If this is true, return 1. Otherwise, return zero. */
2890
2891 static int
2892 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2893 {
2894 enum tree_code code = TREE_CODE (arg);
2895 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2896
2897 /* We can handle some of the tcc_expression cases here. */
2898 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2899 tclass = tcc_unary;
2900 else if (tclass == tcc_expression
2901 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2902 || code == COMPOUND_EXPR))
2903 tclass = tcc_binary;
2904
2905 else if (tclass == tcc_expression && code == SAVE_EXPR
2906 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2907 {
2908 /* If we've already found a CVAL1 or CVAL2, this expression is
2909 too complex to handle. */
2910 if (*cval1 || *cval2)
2911 return 0;
2912
2913 tclass = tcc_unary;
2914 *save_p = 1;
2915 }
2916
2917 switch (tclass)
2918 {
2919 case tcc_unary:
2920 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2921
2922 case tcc_binary:
2923 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2924 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2925 cval1, cval2, save_p));
2926
2927 case tcc_constant:
2928 return 1;
2929
2930 case tcc_expression:
2931 if (code == COND_EXPR)
2932 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2933 cval1, cval2, save_p)
2934 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2935 cval1, cval2, save_p)
2936 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2937 cval1, cval2, save_p));
2938 return 0;
2939
2940 case tcc_comparison:
2941 /* First see if we can handle the first operand, then the second. For
2942 the second operand, we know *CVAL1 can't be zero. It must be that
2943 one side of the comparison is each of the values; test for the
2944 case where this isn't true by failing if the two operands
2945 are the same. */
2946
2947 if (operand_equal_p (TREE_OPERAND (arg, 0),
2948 TREE_OPERAND (arg, 1), 0))
2949 return 0;
2950
2951 if (*cval1 == 0)
2952 *cval1 = TREE_OPERAND (arg, 0);
2953 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2954 ;
2955 else if (*cval2 == 0)
2956 *cval2 = TREE_OPERAND (arg, 0);
2957 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2958 ;
2959 else
2960 return 0;
2961
2962 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2963 ;
2964 else if (*cval2 == 0)
2965 *cval2 = TREE_OPERAND (arg, 1);
2966 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2967 ;
2968 else
2969 return 0;
2970
2971 return 1;
2972
2973 default:
2974 return 0;
2975 }
2976 }
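
/* Example: for ARG of the form (x < y) | (x == y) the first comparison
   records *CVAL1 = x and *CVAL2 = y and the second matches them, so 1 is
   returned; an expression involving a third variable, or a comparison
   like x < x whose operands are equal, yields 0.  */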
2977 \f
2978 /* ARG is a tree that is known to contain just arithmetic operations and
2979 comparisons. Evaluate the operations in the tree substituting NEW0 for
2980 any occurrence of OLD0 as an operand of a comparison and likewise for
2981 NEW1 and OLD1. */
2982
2983 static tree
2984 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2985 tree old1, tree new1)
2986 {
2987 tree type = TREE_TYPE (arg);
2988 enum tree_code code = TREE_CODE (arg);
2989 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2990
2991 /* We can handle some of the tcc_expression cases here. */
2992 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2993 tclass = tcc_unary;
2994 else if (tclass == tcc_expression
2995 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2996 tclass = tcc_binary;
2997
2998 switch (tclass)
2999 {
3000 case tcc_unary:
3001 return fold_build1_loc (loc, code, type,
3002 eval_subst (loc, TREE_OPERAND (arg, 0),
3003 old0, new0, old1, new1));
3004
3005 case tcc_binary:
3006 return fold_build2_loc (loc, code, type,
3007 eval_subst (loc, TREE_OPERAND (arg, 0),
3008 old0, new0, old1, new1),
3009 eval_subst (loc, TREE_OPERAND (arg, 1),
3010 old0, new0, old1, new1));
3011
3012 case tcc_expression:
3013 switch (code)
3014 {
3015 case SAVE_EXPR:
3016 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3017 old1, new1);
3018
3019 case COMPOUND_EXPR:
3020 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3021 old1, new1);
3022
3023 case COND_EXPR:
3024 return fold_build3_loc (loc, code, type,
3025 eval_subst (loc, TREE_OPERAND (arg, 0),
3026 old0, new0, old1, new1),
3027 eval_subst (loc, TREE_OPERAND (arg, 1),
3028 old0, new0, old1, new1),
3029 eval_subst (loc, TREE_OPERAND (arg, 2),
3030 old0, new0, old1, new1));
3031 default:
3032 break;
3033 }
3034 /* Fall through - ??? */
3035
3036 case tcc_comparison:
3037 {
3038 tree arg0 = TREE_OPERAND (arg, 0);
3039 tree arg1 = TREE_OPERAND (arg, 1);
3040
3041 /* We need to check both for exact equality and tree equality. The
3042 former will be true if the operand has a side-effect. In that
3043 case, we know the operand occurred exactly once. */
3044
3045 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3046 arg0 = new0;
3047 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3048 arg0 = new1;
3049
3050 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3051 arg1 = new0;
3052 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3053 arg1 = new1;
3054
3055 return fold_build2_loc (loc, code, type, arg0, arg1);
3056 }
3057
3058 default:
3059 return arg;
3060 }
3061 }
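
/* E.g. with OLD0 = x, NEW0 = a, OLD1 = y, NEW1 = b, eval_subst rewrites
   (x < y) && (y <= x) into (a < b) && (b <= a); only operands of the
   comparisons themselves are substituted.  */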
3062 \f
3063 /* Return a tree for the case when the result of an expression is RESULT
3064 converted to TYPE and OMITTED was previously an operand of the expression
3065 but is now not needed (e.g., we folded OMITTED * 0).
3066
3067 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3068 the conversion of RESULT to TYPE. */
3069
3070 tree
3071 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3072 {
3073 tree t = fold_convert_loc (loc, type, result);
3074
3075 /* If the resulting operand is an empty statement, just return the omitted
3076 statement cast to void. */
3077 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3078 return build1_loc (loc, NOP_EXPR, void_type_node,
3079 fold_ignored_result (omitted));
3080
3081 if (TREE_SIDE_EFFECTS (omitted))
3082 return build2_loc (loc, COMPOUND_EXPR, type,
3083 fold_ignored_result (omitted), t);
3084
3085 return non_lvalue_loc (loc, t);
3086 }
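
/* For example, when f () * 0 is folded to 0 the call cannot simply be
   discarded if it has side effects, so this builds the COMPOUND_EXPR
   (f (), 0) converted to TYPE instead.  */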
3087
3088 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3089
3090 static tree
3091 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3092 tree omitted)
3093 {
3094 tree t = fold_convert_loc (loc, type, result);
3095
3096 /* If the resulting operand is an empty statement, just return the omitted
3097 statement cast to void. */
3098 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3099 return build1_loc (loc, NOP_EXPR, void_type_node,
3100 fold_ignored_result (omitted));
3101
3102 if (TREE_SIDE_EFFECTS (omitted))
3103 return build2_loc (loc, COMPOUND_EXPR, type,
3104 fold_ignored_result (omitted), t);
3105
3106 return pedantic_non_lvalue_loc (loc, t);
3107 }
3108
3109 /* Return a tree for the case when the result of an expression is RESULT
3110 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3111 of the expression but are now not needed.
3112
3113 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3114 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3115 evaluated before OMITTED2. Otherwise, if neither has side effects,
3116 just do the conversion of RESULT to TYPE. */
3117
3118 tree
3119 omit_two_operands_loc (location_t loc, tree type, tree result,
3120 tree omitted1, tree omitted2)
3121 {
3122 tree t = fold_convert_loc (loc, type, result);
3123
3124 if (TREE_SIDE_EFFECTS (omitted2))
3125 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3126 if (TREE_SIDE_EFFECTS (omitted1))
3127 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3128
3129 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3130 }
3131
3132 \f
3133 /* Return a simplified tree node for the truth-negation of ARG. This
3134 never alters ARG itself. We assume that ARG is an operation that
3135 returns a truth value (0 or 1).
3136
3137 FIXME: one would think we would fold the result, but it causes
3138 problems with the dominator optimizer. */
3139
3140 static tree
3141 fold_truth_not_expr (location_t loc, tree arg)
3142 {
3143 tree type = TREE_TYPE (arg);
3144 enum tree_code code = TREE_CODE (arg);
3145 location_t loc1, loc2;
3146
3147 /* If this is a comparison, we can simply invert it, except for
3148 floating-point non-equality comparisons, in which case we just
3149 enclose a TRUTH_NOT_EXPR around what we have. */
3150
3151 if (TREE_CODE_CLASS (code) == tcc_comparison)
3152 {
3153 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3154 if (FLOAT_TYPE_P (op_type)
3155 && flag_trapping_math
3156 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3157 && code != NE_EXPR && code != EQ_EXPR)
3158 return NULL_TREE;
3159
3160 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3161 if (code == ERROR_MARK)
3162 return NULL_TREE;
3163
3164 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3165 TREE_OPERAND (arg, 1));
3166 }
3167
3168 switch (code)
3169 {
3170 case INTEGER_CST:
3171 return constant_boolean_node (integer_zerop (arg), type);
3172
3173 case TRUTH_AND_EXPR:
3174 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3175 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3176 return build2_loc (loc, TRUTH_OR_EXPR, type,
3177 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3178 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3179
3180 case TRUTH_OR_EXPR:
3181 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3182 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3183 return build2_loc (loc, TRUTH_AND_EXPR, type,
3184 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3185 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3186
3187 case TRUTH_XOR_EXPR:
3188 /* Here we can invert either operand. We invert the first operand
3189 unless the second operand is a TRUTH_NOT_EXPR in which case our
3190 result is the XOR of the first operand with the inside of the
3191 negation of the second operand. */
3192
3193 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3194 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3195 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3196 else
3197 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3198 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3199 TREE_OPERAND (arg, 1));
3200
3201 case TRUTH_ANDIF_EXPR:
3202 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3203 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3204 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3205 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3206 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3207
3208 case TRUTH_ORIF_EXPR:
3209 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3210 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3211 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3212 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3213 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3214
3215 case TRUTH_NOT_EXPR:
3216 return TREE_OPERAND (arg, 0);
3217
3218 case COND_EXPR:
3219 {
3220 tree arg1 = TREE_OPERAND (arg, 1);
3221 tree arg2 = TREE_OPERAND (arg, 2);
3222
3223 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3224 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3225
3226 /* A COND_EXPR may have a throw as one operand, which
3227 then has void type. Just leave void operands
3228 as they are. */
3229 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3230 VOID_TYPE_P (TREE_TYPE (arg1))
3231 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3232 VOID_TYPE_P (TREE_TYPE (arg2))
3233 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3234 }
3235
3236 case COMPOUND_EXPR:
3237 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3238 return build2_loc (loc, COMPOUND_EXPR, type,
3239 TREE_OPERAND (arg, 0),
3240 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3241
3242 case NON_LVALUE_EXPR:
3243 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3244 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3245
3246 CASE_CONVERT:
3247 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3248 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3249
3250 /* ... fall through ... */
3251
3252 case FLOAT_EXPR:
3253 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3254 return build1_loc (loc, TREE_CODE (arg), type,
3255 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3256
3257 case BIT_AND_EXPR:
3258 if (!integer_onep (TREE_OPERAND (arg, 1)))
3259 return NULL_TREE;
3260 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3261
3262 case SAVE_EXPR:
3263 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3264
3265 case CLEANUP_POINT_EXPR:
3266 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3267 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3268 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3269
3270 default:
3271 return NULL_TREE;
3272 }
3273 }
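
/* So, for instance, !(a && b) becomes !a || !b and !(a ? b : c) becomes
   a ? !b : !c, while codes with no known inversion make this return
   NULL_TREE so the caller can fall back to wrapping the whole argument
   in a TRUTH_NOT_EXPR.  */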
3274
3275 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3276 assume that ARG is an operation that returns a truth value (0 or 1
3277 for scalars, 0 or -1 for vectors). Return the folded expression if
3278 folding is successful. Otherwise, return NULL_TREE. */
3279
3280 static tree
3281 fold_invert_truthvalue (location_t loc, tree arg)
3282 {
3283 tree type = TREE_TYPE (arg);
3284 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3285 ? BIT_NOT_EXPR
3286 : TRUTH_NOT_EXPR,
3287 type, arg);
3288 }
3289
3290 /* Return a simplified tree node for the truth-negation of ARG. This
3291 never alters ARG itself. We assume that ARG is an operation that
3292 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3293
3294 tree
3295 invert_truthvalue_loc (location_t loc, tree arg)
3296 {
3297 if (TREE_CODE (arg) == ERROR_MARK)
3298 return arg;
3299
3300 tree type = TREE_TYPE (arg);
3301 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3302 ? BIT_NOT_EXPR
3303 : TRUTH_NOT_EXPR,
3304 type, arg);
3305 }
3306
3307 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3308 operands are another bit-wise operation with a common input. If so,
3309 distribute the bit operations to save an operation and possibly two if
3310 constants are involved. For example, convert
3311 (A | B) & (A | C) into A | (B & C)
3312 Further simplification will occur if B and C are constants.
3313
3314 If this optimization cannot be done, 0 will be returned. */
3315
3316 static tree
3317 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3318 tree arg0, tree arg1)
3319 {
3320 tree common;
3321 tree left, right;
3322
3323 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3324 || TREE_CODE (arg0) == code
3325 || (TREE_CODE (arg0) != BIT_AND_EXPR
3326 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3327 return 0;
3328
3329 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3330 {
3331 common = TREE_OPERAND (arg0, 0);
3332 left = TREE_OPERAND (arg0, 1);
3333 right = TREE_OPERAND (arg1, 1);
3334 }
3335 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3336 {
3337 common = TREE_OPERAND (arg0, 0);
3338 left = TREE_OPERAND (arg0, 1);
3339 right = TREE_OPERAND (arg1, 0);
3340 }
3341 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3342 {
3343 common = TREE_OPERAND (arg0, 1);
3344 left = TREE_OPERAND (arg0, 0);
3345 right = TREE_OPERAND (arg1, 1);
3346 }
3347 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3348 {
3349 common = TREE_OPERAND (arg0, 1);
3350 left = TREE_OPERAND (arg0, 0);
3351 right = TREE_OPERAND (arg1, 0);
3352 }
3353 else
3354 return 0;
3355
3356 common = fold_convert_loc (loc, type, common);
3357 left = fold_convert_loc (loc, type, left);
3358 right = fold_convert_loc (loc, type, right);
3359 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3360 fold_build2_loc (loc, code, type, left, right));
3361 }
3362
3363 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3364 with code CODE. This optimization is unsafe. */
3365 static tree
3366 distribute_real_division (location_t loc, enum tree_code code, tree type,
3367 tree arg0, tree arg1)
3368 {
3369 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3370 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3371
3372 /* (A / C) +- (B / C) -> (A +- B) / C. */
3373 if (mul0 == mul1
3374 && operand_equal_p (TREE_OPERAND (arg0, 1),
3375 TREE_OPERAND (arg1, 1), 0))
3376 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3377 fold_build2_loc (loc, code, type,
3378 TREE_OPERAND (arg0, 0),
3379 TREE_OPERAND (arg1, 0)),
3380 TREE_OPERAND (arg0, 1));
3381
3382 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3383 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3384 TREE_OPERAND (arg1, 0), 0)
3385 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3386 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3387 {
3388 REAL_VALUE_TYPE r0, r1;
3389 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3390 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3391 if (!mul0)
3392 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3393 if (!mul1)
3394 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3395 real_arithmetic (&r0, code, &r0, &r1);
3396 return fold_build2_loc (loc, MULT_EXPR, type,
3397 TREE_OPERAND (arg0, 0),
3398 build_real (type, r0));
3399 }
3400
3401 return NULL_TREE;
3402 }
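
/* Illustrative example of why this is unsafe: (a / 3.0) + (b / 3.0)
   becomes (a + b) / 3.0 and (a / 2.0) + (a / 4.0) becomes a * 0.75;
   either rewrite can round differently from the original expression.  */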
3403 \f
3404 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3405 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3406
3407 static tree
3408 make_bit_field_ref (location_t loc, tree inner, tree type,
3409 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3410 {
3411 tree result, bftype;
3412
3413 if (bitpos == 0)
3414 {
3415 tree size = TYPE_SIZE (TREE_TYPE (inner));
3416 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3417 || POINTER_TYPE_P (TREE_TYPE (inner)))
3418 && tree_fits_shwi_p (size)
3419 && tree_to_shwi (size) == bitsize)
3420 return fold_convert_loc (loc, type, inner);
3421 }
3422
3423 bftype = type;
3424 if (TYPE_PRECISION (bftype) != bitsize
3425 || TYPE_UNSIGNED (bftype) == !unsignedp)
3426 bftype = build_nonstandard_integer_type (bitsize, 0);
3427
3428 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3429 size_int (bitsize), bitsize_int (bitpos));
3430
3431 if (bftype != type)
3432 result = fold_convert_loc (loc, type, result);
3433
3434 return result;
3435 }
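
/* Note the bitpos == 0 shortcut above: a reference to all the bits of an
   integral or pointer INNER, e.g. 32 bits of a 32-bit int, is no
   bit-field extraction at all and degenerates to a conversion.  */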
3436
3437 /* Optimize a bit-field compare.
3438
3439 There are two cases: First is a compare against a constant and the
3440 second is a comparison of two items where the fields are at the same
3441 bit position relative to the start of a chunk (byte, halfword, word)
3442 large enough to contain it. In these cases we can avoid the shift
3443 implicit in bitfield extractions.
3444
3445 For constants, we emit a compare of the shifted constant with the
3446 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3447 compared. For two fields at the same position, we do the ANDs with the
3448 similar mask and compare the result of the ANDs.
3449
3450 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3451 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3452 are the left and right operands of the comparison, respectively.
3453
3454 If the optimization described above can be done, we return the resulting
3455 tree. Otherwise we return zero. */
3456
3457 static tree
3458 optimize_bit_field_compare (location_t loc, enum tree_code code,
3459 tree compare_type, tree lhs, tree rhs)
3460 {
3461 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3462 tree type = TREE_TYPE (lhs);
3463 tree signed_type, unsigned_type;
3464 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3465 enum machine_mode lmode, rmode, nmode;
3466 int lunsignedp, runsignedp;
3467 int lvolatilep = 0, rvolatilep = 0;
3468 tree linner, rinner = NULL_TREE;
3469 tree mask;
3470 tree offset;
3471
3472 /* In the strict volatile bitfields case, doing code changes here may prevent
3473 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3474 if (flag_strict_volatile_bitfields > 0)
3475 return 0;
3476
3477 /* Get all the information about the extractions being done. If the bit size
3478 is the same as the size of the underlying object, we aren't doing an
3479 extraction at all and so can do nothing. We also don't want to
3480 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3481 then will no longer be able to replace it. */
3482 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3483 &lunsignedp, &lvolatilep, false);
3484 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3485 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3486 return 0;
3487
3488 if (!const_p)
3489 {
3490 /* If this is not a constant, we can only do something if bit positions,
3491 sizes, and signedness are the same. */
3492 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3493 &runsignedp, &rvolatilep, false);
3494
3495 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3496 || lunsignedp != runsignedp || offset != 0
3497 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3498 return 0;
3499 }
3500
3501 /* See if we can find a mode to refer to this field. We should be able to,
3502 but fail if we can't. */
3503 if (lvolatilep
3504 && GET_MODE_BITSIZE (lmode) > 0
3505 && flag_strict_volatile_bitfields > 0)
3506 nmode = lmode;
3507 else
3508 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3509 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3510 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3511 TYPE_ALIGN (TREE_TYPE (rinner))),
3512 word_mode, lvolatilep || rvolatilep);
3513 if (nmode == VOIDmode)
3514 return 0;
3515
3516 /* Set signed and unsigned types of the precision of this mode for the
3517 shifts below. */
3518 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3519 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3520
3521 /* Compute the bit position and size for the new reference and our offset
3522 within it. If the new reference is the same size as the original, we
3523 won't optimize anything, so return zero. */
3524 nbitsize = GET_MODE_BITSIZE (nmode);
3525 nbitpos = lbitpos & ~ (nbitsize - 1);
3526 lbitpos -= nbitpos;
3527 if (nbitsize == lbitsize)
3528 return 0;
3529
3530 if (BYTES_BIG_ENDIAN)
3531 lbitpos = nbitsize - lbitsize - lbitpos;
3532
3533 /* Make the mask to be used against the extracted field. */
3534 mask = build_int_cst_type (unsigned_type, -1);
3535 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3536 mask = const_binop (RSHIFT_EXPR, mask,
3537 size_int (nbitsize - lbitsize - lbitpos));
3538
3539 if (! const_p)
3540 /* If not comparing with constant, just rework the comparison
3541 and return. */
3542 return fold_build2_loc (loc, code, compare_type,
3543 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3544 make_bit_field_ref (loc, linner,
3545 unsigned_type,
3546 nbitsize, nbitpos,
3547 1),
3548 mask),
3549 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3550 make_bit_field_ref (loc, rinner,
3551 unsigned_type,
3552 nbitsize, nbitpos,
3553 1),
3554 mask));
3555
3556 /* Otherwise, we are handling the constant case. See if the constant is too
3557 big for the field. Warn and return a tree for 0 (false) if so. We do
3558 this not only for its own sake, but to avoid having to test for this
3559 error case below. If we didn't, we might generate wrong code.
3560
3561 For unsigned fields, the constant shifted right by the field length should
3562 be all zero. For signed fields, the high-order bits should agree with
3563 the sign bit. */
3564
3565 if (lunsignedp)
3566 {
3567 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3568 fold_convert_loc (loc,
3569 unsigned_type, rhs),
3570 size_int (lbitsize))))
3571 {
3572 warning (0, "comparison is always %d due to width of bit-field",
3573 code == NE_EXPR);
3574 return constant_boolean_node (code == NE_EXPR, compare_type);
3575 }
3576 }
3577 else
3578 {
3579 tree tem = const_binop (RSHIFT_EXPR,
3580 fold_convert_loc (loc, signed_type, rhs),
3581 size_int (lbitsize - 1));
3582 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3583 {
3584 warning (0, "comparison is always %d due to width of bit-field",
3585 code == NE_EXPR);
3586 return constant_boolean_node (code == NE_EXPR, compare_type);
3587 }
3588 }
3589
3590 /* Single-bit compares should always be against zero. */
3591 if (lbitsize == 1 && ! integer_zerop (rhs))
3592 {
3593 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3594 rhs = build_int_cst (type, 0);
3595 }
3596
3597 /* Make a new bitfield reference, shift the constant over the
3598 appropriate number of bits and mask it with the computed mask
3599 (in case this was a signed field). If we changed it, make a new one. */
3600 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3601 if (lvolatilep)
3602 {
3603 TREE_SIDE_EFFECTS (lhs) = 1;
3604 TREE_THIS_VOLATILE (lhs) = 1;
3605 }
3606
3607 rhs = const_binop (BIT_AND_EXPR,
3608 const_binop (LSHIFT_EXPR,
3609 fold_convert_loc (loc, unsigned_type, rhs),
3610 size_int (lbitpos)),
3611 mask);
3612
3613 lhs = build2_loc (loc, code, compare_type,
3614 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3615 return lhs;
3616 }
3617 \f
3618 /* Subroutine for fold_truth_andor_1: decode a field reference.
3619
3620 If EXP is a comparison reference, we return the innermost reference.
3621
3622 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3623 set to the starting bit number.
3624
3625 If the innermost field can be completely contained in a mode-sized
3626 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3627
3628 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3629 otherwise it is not changed.
3630
3631 *PUNSIGNEDP is set to the signedness of the field.
3632
3633 *PMASK is set to the mask used. This is either contained in a
3634 BIT_AND_EXPR or derived from the width of the field.
3635
3636 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3637
3638 Return 0 if this is not a component reference or is one that we can't
3639 do anything with. */
3640
3641 static tree
3642 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3643 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3644 int *punsignedp, int *pvolatilep,
3645 tree *pmask, tree *pand_mask)
3646 {
3647 tree outer_type = 0;
3648 tree and_mask = 0;
3649 tree mask, inner, offset;
3650 tree unsigned_type;
3651 unsigned int precision;
3652
3653 /* All the optimizations using this function assume integer fields.
3654 There are problems with FP fields since the type_for_size call
3655 below can fail for, e.g., XFmode. */
3656 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3657 return 0;
3658
3659 /* We are interested in the bare arrangement of bits, so strip everything
3660 that doesn't affect the machine mode. However, record the type of the
3661 outermost expression if it may matter below. */
3662 if (CONVERT_EXPR_P (exp)
3663 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3664 outer_type = TREE_TYPE (exp);
3665 STRIP_NOPS (exp);
3666
3667 if (TREE_CODE (exp) == BIT_AND_EXPR)
3668 {
3669 and_mask = TREE_OPERAND (exp, 1);
3670 exp = TREE_OPERAND (exp, 0);
3671 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3672 if (TREE_CODE (and_mask) != INTEGER_CST)
3673 return 0;
3674 }
3675
3676 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3677 punsignedp, pvolatilep, false);
3678 if ((inner == exp && and_mask == 0)
3679 || *pbitsize < 0 || offset != 0
3680 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3681 return 0;
3682
3683 /* If the number of bits in the reference is the same as the bitsize of
3684 the outer type, then the outer type gives the signedness. Otherwise
3685 (in case of a small bitfield) the signedness is unchanged. */
3686 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3687 *punsignedp = TYPE_UNSIGNED (outer_type);
3688
3689 /* Compute the mask to access the bitfield. */
3690 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3691 precision = TYPE_PRECISION (unsigned_type);
3692
3693 mask = build_int_cst_type (unsigned_type, -1);
3694
3695 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3696 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3697
3698 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3699 if (and_mask != 0)
3700 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3701 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3702
3703 *pmask = mask;
3704 *pand_mask = and_mask;
3705 return inner;
3706 }
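
/* Worked example (an added sketch, not from the original sources): for
   struct s { unsigned f : 5; } x;
   decoding x.f & 3 (the lhs of (x.f & 3) != 0) sets *PBITSIZE to 5,
   *PBITPOS to the field's position within its container, *PAND_MASK to
   3, and *PMASK to 3 as well: the 5-bit field mask 0x1f ANDed with the
   BIT_AND_EXPR constant. */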
3707
3708 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3709 bit positions and the type of MASK is signed. */
3710
3711 static int
3712 all_ones_mask_p (const_tree mask, unsigned int size)
3713 {
3714 tree type = TREE_TYPE (mask);
3715 unsigned int precision = TYPE_PRECISION (type);
3716
3717 /* If this function returns true when the type of the mask is
3718 UNSIGNED, then there will be errors. In particular see
3719 gcc.c-torture/execute/990326-1.c. There does not appear to be
3720 any documentation paper trail as to why this is so. But the pre
3721 wide-int worked with that restriction and it has been preserved
3722 here. */
3723 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3724 return false;
3725
3726 return wi::mask (size, false, precision) == mask;
3727 }
3728
3729 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3730 represents the sign bit of EXP's type. If EXP represents a sign
3731 or zero extension, also test VAL against the unextended type.
3732 The return value is the (sub)expression whose sign bit is VAL,
3733 or NULL_TREE otherwise. */
3734
3735 static tree
3736 sign_bit_p (tree exp, const_tree val)
3737 {
3738 int width;
3739 tree t;
3740
3741 /* Tree EXP must have an integral type. */
3742 t = TREE_TYPE (exp);
3743 if (! INTEGRAL_TYPE_P (t))
3744 return NULL_TREE;
3745
3746 /* Tree VAL must be an integer constant. */
3747 if (TREE_CODE (val) != INTEGER_CST
3748 || TREE_OVERFLOW (val))
3749 return NULL_TREE;
3750
3751 width = TYPE_PRECISION (t);
3752 if (wi::only_sign_bit_p (val, width))
3753 return exp;
3754
3755 /* Handle extension from a narrower type. */
3756 if (TREE_CODE (exp) == NOP_EXPR
3757 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3758 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3759
3760 return NULL_TREE;
3761 }
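
/* Example (added for illustration): for a 32-bit int EXP and
   VAL == 0x80000000, sign_bit_p returns EXP itself; if EXP is a
   NOP_EXPR widening a 16-bit operand, VAL is instead tested against
   0x8000, the sign bit of the narrower type. */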
3762
3763 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3764 to be evaluated unconditionally. */
3765
3766 static int
3767 simple_operand_p (const_tree exp)
3768 {
3769 /* Strip any conversions that don't change the machine mode. */
3770 STRIP_NOPS (exp);
3771
3772 return (CONSTANT_CLASS_P (exp)
3773 || TREE_CODE (exp) == SSA_NAME
3774 || (DECL_P (exp)
3775 && ! TREE_ADDRESSABLE (exp)
3776 && ! TREE_THIS_VOLATILE (exp)
3777 && ! DECL_NONLOCAL (exp)
3778 /* Don't regard global variables as simple. They may be
3779 allocated in ways unknown to the compiler (shared memory,
3780 #pragma weak, etc). */
3781 && ! TREE_PUBLIC (exp)
3782 && ! DECL_EXTERNAL (exp)
3783 /* Weakrefs are not safe to be read, since they can be NULL.
3784 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3785 have DECL_WEAK flag set. */
3786 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3787 /* Loading a static variable is unduly expensive, but global
3788 registers aren't expensive. */
3789 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3790 }
3791
3792 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3793 to be evaluated unconditionally.
3794 In addition to simple_operand_p, we assume that comparisons, conversions,
3795 and logic-not operations are simple, if their operands are simple, too. */
3796
3797 static bool
3798 simple_operand_p_2 (tree exp)
3799 {
3800 enum tree_code code;
3801
3802 if (TREE_SIDE_EFFECTS (exp)
3803 || tree_could_trap_p (exp))
3804 return false;
3805
3806 while (CONVERT_EXPR_P (exp))
3807 exp = TREE_OPERAND (exp, 0);
3808
3809 code = TREE_CODE (exp);
3810
3811 if (TREE_CODE_CLASS (code) == tcc_comparison)
3812 return (simple_operand_p (TREE_OPERAND (exp, 0))
3813 && simple_operand_p (TREE_OPERAND (exp, 1)));
3814
3815 if (code == TRUTH_NOT_EXPR)
3816 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3817
3818 return simple_operand_p (exp);
3819 }
3820
3821 \f
3822 /* The following functions are subroutines to fold_range_test and allow it to
3823 try to change a logical combination of comparisons into a range test.
3824
3825 For example, both
3826 X == 2 || X == 3 || X == 4 || X == 5
3827 and
3828 X >= 2 && X <= 5
3829 are converted to
3830 (unsigned) (X - 2) <= 3
3831
3832 We describe each set of comparisons as being either inside or outside
3833 a range, using a variable named like IN_P, and then describe the
3834 range with a lower and upper bound. If one of the bounds is omitted,
3835 it represents either the highest or lowest value of the type.
3836
3837 In the comments below, we represent a range by two numbers in brackets
3838 preceded by a "+" to designate being inside that range, or a "-" to
3839 designate being outside that range, so the condition can be inverted by
3840 flipping the prefix. An omitted bound is represented by a "-". For
3841 example, "- [-, 10]" means being outside the range starting at the lowest
3842 possible value and ending at 10, in other words, being greater than 10.
3843 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3844 always false.
3845
3846 We set up things so that the missing bounds are handled in a consistent
3847 manner so neither a missing bound nor "true" and "false" need to be
3848 handled using a special case. */
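
/* Worked example (added; not part of the original comment): for an
   unsigned X, the test X >= 2 is described as + [2, -] and X <= 5 as
   + [-, 5]; their conjunction merges to + [2, 5], which
   build_range_check below finally emits as the single unsigned
   comparison (X - 2) <= 3. */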
3849
3850 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3851 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3852 and UPPER1_P are nonzero if the respective argument is an upper bound
3853 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3854 must be specified for a comparison. ARG1 will be converted to ARG0's
3855 type if both are specified. */
3856
3857 static tree
3858 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3859 tree arg1, int upper1_p)
3860 {
3861 tree tem;
3862 int result;
3863 int sgn0, sgn1;
3864
3865 /* If neither arg represents infinity, do the normal operation.
3866 Else, if not a comparison, return infinity. Else handle the special
3867 comparison rules. Note that most of the cases below won't occur, but
3868 are handled for consistency. */
3869
3870 if (arg0 != 0 && arg1 != 0)
3871 {
3872 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3873 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3874 STRIP_NOPS (tem);
3875 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3876 }
3877
3878 if (TREE_CODE_CLASS (code) != tcc_comparison)
3879 return 0;
3880
3881 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3882 for neither. In real maths, we cannot assume open ended ranges are
3883 the same. But, this is computer arithmetic, where numbers are finite.
3884 We can therefore make the transformation of any unbounded range with
3885 the value Z, Z being greater than any representable number. This permits
3886 us to treat unbounded ranges as equal. */
3887 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3888 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3889 switch (code)
3890 {
3891 case EQ_EXPR:
3892 result = sgn0 == sgn1;
3893 break;
3894 case NE_EXPR:
3895 result = sgn0 != sgn1;
3896 break;
3897 case LT_EXPR:
3898 result = sgn0 < sgn1;
3899 break;
3900 case LE_EXPR:
3901 result = sgn0 <= sgn1;
3902 break;
3903 case GT_EXPR:
3904 result = sgn0 > sgn1;
3905 break;
3906 case GE_EXPR:
3907 result = sgn0 >= sgn1;
3908 break;
3909 default:
3910 gcc_unreachable ();
3911 }
3912
3913 return constant_boolean_node (result, type);
3914 }
3915 \f
3916 /* Helper routine for make_range. Perform one step for it, return
3917 new expression if the loop should continue or NULL_TREE if it should
3918 stop. */
3919
3920 tree
3921 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3922 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3923 bool *strict_overflow_p)
3924 {
3925 tree arg0_type = TREE_TYPE (arg0);
3926 tree n_low, n_high, low = *p_low, high = *p_high;
3927 int in_p = *p_in_p, n_in_p;
3928
3929 switch (code)
3930 {
3931 case TRUTH_NOT_EXPR:
3932 /* We can only do something if the range is testing for zero. */
3933 if (low == NULL_TREE || high == NULL_TREE
3934 || ! integer_zerop (low) || ! integer_zerop (high))
3935 return NULL_TREE;
3936 *p_in_p = ! in_p;
3937 return arg0;
3938
3939 case EQ_EXPR: case NE_EXPR:
3940 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3941 /* We can only do something if the range is testing for zero
3942 and if the second operand is an integer constant. Note that
3943 saying something is "in" the range we make is done by
3944 complementing IN_P, since IN_P is set for the initial case of
3945 being not equal to zero; "out" is leaving it alone. */
3946 if (low == NULL_TREE || high == NULL_TREE
3947 || ! integer_zerop (low) || ! integer_zerop (high)
3948 || TREE_CODE (arg1) != INTEGER_CST)
3949 return NULL_TREE;
3950
3951 switch (code)
3952 {
3953 case NE_EXPR: /* - [c, c] */
3954 low = high = arg1;
3955 break;
3956 case EQ_EXPR: /* + [c, c] */
3957 in_p = ! in_p, low = high = arg1;
3958 break;
3959 case GT_EXPR: /* - [-, c] */
3960 low = 0, high = arg1;
3961 break;
3962 case GE_EXPR: /* + [c, -] */
3963 in_p = ! in_p, low = arg1, high = 0;
3964 break;
3965 case LT_EXPR: /* - [c, -] */
3966 low = arg1, high = 0;
3967 break;
3968 case LE_EXPR: /* + [-, c] */
3969 in_p = ! in_p, low = 0, high = arg1;
3970 break;
3971 default:
3972 gcc_unreachable ();
3973 }
3974
3975 /* If this is an unsigned comparison, we also know that EXP is
3976 greater than or equal to zero. We base the range tests we make
3977 on that fact, so we record it here so we can parse existing
3978 range tests. We test arg0_type since often the return type
3979 of, e.g. EQ_EXPR, is boolean. */
3980 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3981 {
3982 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3983 in_p, low, high, 1,
3984 build_int_cst (arg0_type, 0),
3985 NULL_TREE))
3986 return NULL_TREE;
3987
3988 in_p = n_in_p, low = n_low, high = n_high;
3989
3990 /* If the high bound is missing, but we have a nonzero low
3991 bound, reverse the range so it goes from zero to the low bound
3992 minus 1. */
3993 if (high == 0 && low && ! integer_zerop (low))
3994 {
3995 in_p = ! in_p;
3996 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3997 build_int_cst (TREE_TYPE (low), 1), 0);
3998 low = build_int_cst (arg0_type, 0);
3999 }
4000 }
4001
4002 *p_low = low;
4003 *p_high = high;
4004 *p_in_p = in_p;
4005 return arg0;
4006
4007 case NEGATE_EXPR:
4008 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4009 low and high are non-NULL, then normalize will DTRT. */
4010 if (!TYPE_UNSIGNED (arg0_type)
4011 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4012 {
4013 if (low == NULL_TREE)
4014 low = TYPE_MIN_VALUE (arg0_type);
4015 if (high == NULL_TREE)
4016 high = TYPE_MAX_VALUE (arg0_type);
4017 }
4018
4019 /* (-x) IN [a,b] -> x in [-b, -a] */
4020 n_low = range_binop (MINUS_EXPR, exp_type,
4021 build_int_cst (exp_type, 0),
4022 0, high, 1);
4023 n_high = range_binop (MINUS_EXPR, exp_type,
4024 build_int_cst (exp_type, 0),
4025 0, low, 0);
4026 if (n_high != 0 && TREE_OVERFLOW (n_high))
4027 return NULL_TREE;
4028 goto normalize;
4029
4030 case BIT_NOT_EXPR:
4031 /* ~ X -> -X - 1 */
4032 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4033 build_int_cst (exp_type, 1));
4034
4035 case PLUS_EXPR:
4036 case MINUS_EXPR:
4037 if (TREE_CODE (arg1) != INTEGER_CST)
4038 return NULL_TREE;
4039
4040 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4041 move a constant to the other side. */
4042 if (!TYPE_UNSIGNED (arg0_type)
4043 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4044 return NULL_TREE;
4045
4046 /* If EXP is signed, any overflow in the computation is undefined,
4047 so we don't worry about it so long as our computations on
4048 the bounds don't overflow. For unsigned, overflow is defined
4049 and this is exactly the right thing. */
4050 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4051 arg0_type, low, 0, arg1, 0);
4052 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4053 arg0_type, high, 1, arg1, 0);
4054 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4055 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4056 return NULL_TREE;
4057
4058 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4059 *strict_overflow_p = true;
4060
4061 normalize:
4062 /* Check for an unsigned range which has wrapped around the maximum
4063 value thus making n_high < n_low, and normalize it. */
4064 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4065 {
4066 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4067 build_int_cst (TREE_TYPE (n_high), 1), 0);
4068 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4069 build_int_cst (TREE_TYPE (n_low), 1), 0);
4070
4071 /* If the range is of the form +/- [ x+1, x ], we won't
4072 be able to normalize it. But then, it represents the
4073 whole range or the empty set, so make it
4074 +/- [ -, - ]. */
4075 if (tree_int_cst_equal (n_low, low)
4076 && tree_int_cst_equal (n_high, high))
4077 low = high = 0;
4078 else
4079 in_p = ! in_p;
4080 }
4081 else
4082 low = n_low, high = n_high;
4083
4084 *p_low = low;
4085 *p_high = high;
4086 *p_in_p = in_p;
4087 return arg0;
4088
4089 CASE_CONVERT:
4090 case NON_LVALUE_EXPR:
4091 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4092 return NULL_TREE;
4093
4094 if (! INTEGRAL_TYPE_P (arg0_type)
4095 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4096 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4097 return NULL_TREE;
4098
4099 n_low = low, n_high = high;
4100
4101 if (n_low != 0)
4102 n_low = fold_convert_loc (loc, arg0_type, n_low);
4103
4104 if (n_high != 0)
4105 n_high = fold_convert_loc (loc, arg0_type, n_high);
4106
4107 /* If we're converting arg0 from an unsigned type to exp's
4108 signed type, we will be doing the comparison as unsigned.
4109 The tests above have already verified that LOW and HIGH
4110 are both positive.
4111
4112 So we have to ensure that we will handle large unsigned
4113 values the same way that the current signed bounds treat
4114 negative values. */
4115
4116 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4117 {
4118 tree high_positive;
4119 tree equiv_type;
4120 /* For fixed-point modes, we need to pass the saturating flag
4121 as the 2nd parameter. */
4122 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4123 equiv_type
4124 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4125 TYPE_SATURATING (arg0_type));
4126 else
4127 equiv_type
4128 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4129
4130 /* A range without an upper bound is, naturally, unbounded.
4131 Since convert would have cropped a very large value, use
4132 the max value for the destination type. */
4133 high_positive
4134 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4135 : TYPE_MAX_VALUE (arg0_type);
4136
4137 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4138 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4139 fold_convert_loc (loc, arg0_type,
4140 high_positive),
4141 build_int_cst (arg0_type, 1));
4142
4143 /* If the low bound is specified, "and" the range with the
4144 range for which the original unsigned value will be
4145 positive. */
4146 if (low != 0)
4147 {
4148 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4149 1, fold_convert_loc (loc, arg0_type,
4150 integer_zero_node),
4151 high_positive))
4152 return NULL_TREE;
4153
4154 in_p = (n_in_p == in_p);
4155 }
4156 else
4157 {
4158 /* Otherwise, "or" the range with the range of the input
4159 that will be interpreted as negative. */
4160 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4161 1, fold_convert_loc (loc, arg0_type,
4162 integer_zero_node),
4163 high_positive))
4164 return NULL_TREE;
4165
4166 in_p = (in_p != n_in_p);
4167 }
4168 }
4169
4170 *p_low = n_low;
4171 *p_high = n_high;
4172 *p_in_p = in_p;
4173 return arg0;
4174
4175 default:
4176 return NULL_TREE;
4177 }
4178 }
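
/* Worked example (added): for an unsigned X, make_range below feeds
   X - 2 <= 3 through this function twice. The LE_EXPR step produces
   + [0, 3] for X - 2 (the lower bound comes from the unsignedness of
   the type), and the MINUS_EXPR step moves the constant into the
   bounds, giving + [2, 5] for X itself. */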
4179
4180 /* Given EXP, a logical expression, set the range it is testing into
4181 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4182 actually being tested. *PLOW and *PHIGH will be made of the same
4183 type as the returned expression. If EXP is not a comparison, we
4184 will most likely not be returning a useful value and range. Set
4185 *STRICT_OVERFLOW_P to true if the return value is only valid
4186 because signed overflow is undefined; otherwise, do not change
4187 *STRICT_OVERFLOW_P. */
4188
4189 tree
4190 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4191 bool *strict_overflow_p)
4192 {
4193 enum tree_code code;
4194 tree arg0, arg1 = NULL_TREE;
4195 tree exp_type, nexp;
4196 int in_p;
4197 tree low, high;
4198 location_t loc = EXPR_LOCATION (exp);
4199
4200 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4201 and see if we can refine the range. Some of the cases below may not
4202 happen, but it doesn't seem worth worrying about this. We "continue"
4203 the outer loop when we've changed something; otherwise we "break"
4204 the switch, which will "break" the while. */
4205
4206 in_p = 0;
4207 low = high = build_int_cst (TREE_TYPE (exp), 0);
4208
4209 while (1)
4210 {
4211 code = TREE_CODE (exp);
4212 exp_type = TREE_TYPE (exp);
4213 arg0 = NULL_TREE;
4214
4215 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4216 {
4217 if (TREE_OPERAND_LENGTH (exp) > 0)
4218 arg0 = TREE_OPERAND (exp, 0);
4219 if (TREE_CODE_CLASS (code) == tcc_binary
4220 || TREE_CODE_CLASS (code) == tcc_comparison
4221 || (TREE_CODE_CLASS (code) == tcc_expression
4222 && TREE_OPERAND_LENGTH (exp) > 1))
4223 arg1 = TREE_OPERAND (exp, 1);
4224 }
4225 if (arg0 == NULL_TREE)
4226 break;
4227
4228 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4229 &high, &in_p, strict_overflow_p);
4230 if (nexp == NULL_TREE)
4231 break;
4232 exp = nexp;
4233 }
4234
4235 /* If EXP is a constant, we can evaluate whether this is true or false. */
4236 if (TREE_CODE (exp) == INTEGER_CST)
4237 {
4238 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4239 exp, 0, low, 0))
4240 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4241 exp, 1, high, 1)));
4242 low = high = 0;
4243 exp = 0;
4244 }
4245
4246 *pin_p = in_p, *plow = low, *phigh = high;
4247 return exp;
4248 }
4249 \f
4250 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4251 type, TYPE, return an expression to test if EXP is in (or out of, depending
4252 on IN_P) the range. Return 0 if the test couldn't be created. */
4253
4254 tree
4255 build_range_check (location_t loc, tree type, tree exp, int in_p,
4256 tree low, tree high)
4257 {
4258 tree etype = TREE_TYPE (exp), value;
4259
4260 #ifdef HAVE_canonicalize_funcptr_for_compare
4261 /* Disable this optimization for function pointer expressions
4262 on targets that require function pointer canonicalization. */
4263 if (HAVE_canonicalize_funcptr_for_compare
4264 && TREE_CODE (etype) == POINTER_TYPE
4265 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4266 return NULL_TREE;
4267 #endif
4268
4269 if (! in_p)
4270 {
4271 value = build_range_check (loc, type, exp, 1, low, high);
4272 if (value != 0)
4273 return invert_truthvalue_loc (loc, value);
4274
4275 return 0;
4276 }
4277
4278 if (low == 0 && high == 0)
4279 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4280
4281 if (low == 0)
4282 return fold_build2_loc (loc, LE_EXPR, type, exp,
4283 fold_convert_loc (loc, etype, high));
4284
4285 if (high == 0)
4286 return fold_build2_loc (loc, GE_EXPR, type, exp,
4287 fold_convert_loc (loc, etype, low));
4288
4289 if (operand_equal_p (low, high, 0))
4290 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4291 fold_convert_loc (loc, etype, low));
4292
4293 if (integer_zerop (low))
4294 {
4295 if (! TYPE_UNSIGNED (etype))
4296 {
4297 etype = unsigned_type_for (etype);
4298 high = fold_convert_loc (loc, etype, high);
4299 exp = fold_convert_loc (loc, etype, exp);
4300 }
4301 return build_range_check (loc, type, exp, 1, 0, high);
4302 }
4303
4304 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4305 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4306 {
4307 int prec = TYPE_PRECISION (etype);
4308 wide_int osb = wi::set_bit_in_zero (prec - 1, prec) - 1;
4309
4310 if (osb == high)
4311 {
4312 if (TYPE_UNSIGNED (etype))
4313 {
4314 tree signed_etype = signed_type_for (etype);
4315 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4316 etype
4317 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4318 else
4319 etype = signed_etype;
4320 exp = fold_convert_loc (loc, etype, exp);
4321 }
4322 return fold_build2_loc (loc, GT_EXPR, type, exp,
4323 build_int_cst (etype, 0));
4324 }
4325 }
4326
4327 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4328 This requires wrap-around arithmetic for the type of the expression.
4329 First make sure that arithmetic in this type is valid, then make sure
4330 that it wraps around. */
4331 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4332 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4333 TYPE_UNSIGNED (etype));
4334
4335 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4336 {
4337 tree utype, minv, maxv;
4338
4339 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4340 for the type in question, as we rely on this here. */
4341 utype = unsigned_type_for (etype);
4342 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4343 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4344 build_int_cst (TREE_TYPE (maxv), 1), 1);
4345 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4346
4347 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4348 minv, 1, maxv, 1)))
4349 etype = utype;
4350 else
4351 return 0;
4352 }
4353
4354 high = fold_convert_loc (loc, etype, high);
4355 low = fold_convert_loc (loc, etype, low);
4356 exp = fold_convert_loc (loc, etype, exp);
4357
4358 value = const_binop (MINUS_EXPR, high, low);
4359
4361 if (POINTER_TYPE_P (etype))
4362 {
4363 if (value != 0 && !TREE_OVERFLOW (value))
4364 {
4365 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4366 return build_range_check (loc, type,
4367 fold_build_pointer_plus_loc (loc, exp, low),
4368 1, build_int_cst (etype, 0), value);
4369 }
4370 return 0;
4371 }
4372
4373 if (value != 0 && !TREE_OVERFLOW (value))
4374 return build_range_check (loc, type,
4375 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4376 1, build_int_cst (etype, 0), value);
4377
4378 return 0;
4379 }
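
/* A minimal sketch (added for illustration; build_range_check builds
   trees, not C) of the final form of the check for + [LOW, HIGH] once
   the expression has been converted to an unsigned type: subtracting
   LOW makes values below LOW wrap around, so a single unsigned
   comparison covers both bounds. */
#if 0
static int
range_check_sketch (unsigned int exp, unsigned int low, unsigned int high)
{
  /* Assumes low <= high; exp - low wraps for exp < low. */
  return exp - low <= high - low;
}
#endif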
4380 \f
4381 /* Return the predecessor of VAL in its type, handling the infinite case. */
4382
4383 static tree
4384 range_predecessor (tree val)
4385 {
4386 tree type = TREE_TYPE (val);
4387
4388 if (INTEGRAL_TYPE_P (type)
4389 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4390 return 0;
4391 else
4392 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4393 build_int_cst (TREE_TYPE (val), 1), 0);
4394 }
4395
4396 /* Return the successor of VAL in its type, handling the infinite case. */
4397
4398 static tree
4399 range_successor (tree val)
4400 {
4401 tree type = TREE_TYPE (val);
4402
4403 if (INTEGRAL_TYPE_P (type)
4404 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4405 return 0;
4406 else
4407 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4408 build_int_cst (TREE_TYPE (val), 1), 0);
4409 }
4410
4411 /* Given two ranges, see if we can merge them into one. Return 1 if we
4412 can, 0 if we can't. Set the output range into the specified parameters. */
4413
4414 bool
4415 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4416 tree high0, int in1_p, tree low1, tree high1)
4417 {
4418 int no_overlap;
4419 int subset;
4420 int temp;
4421 tree tem;
4422 int in_p;
4423 tree low, high;
4424 int lowequal = ((low0 == 0 && low1 == 0)
4425 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4426 low0, 0, low1, 0)));
4427 int highequal = ((high0 == 0 && high1 == 0)
4428 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4429 high0, 1, high1, 1)));
4430
4431 /* Make range 0 be the range that starts first, or ends last if they
4432 start at the same value. Swap them if this isn't the case. */
4433 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4434 low0, 0, low1, 0))
4435 || (lowequal
4436 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4437 high1, 1, high0, 1))))
4438 {
4439 temp = in0_p, in0_p = in1_p, in1_p = temp;
4440 tem = low0, low0 = low1, low1 = tem;
4441 tem = high0, high0 = high1, high1 = tem;
4442 }
4443
4444 /* Now flag two cases, whether the ranges are disjoint or whether the
4445 second range is totally subsumed in the first. Note that the tests
4446 below are simplified by the ones above. */
4447 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4448 high0, 1, low1, 0));
4449 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4450 high1, 1, high0, 1));
4451
4452 /* We now have four cases, depending on whether we are including or
4453 excluding the two ranges. */
4454 if (in0_p && in1_p)
4455 {
4456 /* If they don't overlap, the result is false. If the second range
4457 is a subset it is the result. Otherwise, the range is from the start
4458 of the second to the end of the first. */
4459 if (no_overlap)
4460 in_p = 0, low = high = 0;
4461 else if (subset)
4462 in_p = 1, low = low1, high = high1;
4463 else
4464 in_p = 1, low = low1, high = high0;
4465 }
4466
4467 else if (in0_p && ! in1_p)
4468 {
4469 /* If they don't overlap, the result is the first range. If they are
4470 equal, the result is false. If the second range is a subset of the
4471 first, and the ranges begin at the same place, we go from just after
4472 the end of the second range to the end of the first. If the second
4473 range is not a subset of the first, or if it is a subset and both
4474 ranges end at the same place, the range starts at the start of the
4475 first range and ends just before the second range.
4476 Otherwise, we can't describe this as a single range. */
4477 if (no_overlap)
4478 in_p = 1, low = low0, high = high0;
4479 else if (lowequal && highequal)
4480 in_p = 0, low = high = 0;
4481 else if (subset && lowequal)
4482 {
4483 low = range_successor (high1);
4484 high = high0;
4485 in_p = 1;
4486 if (low == 0)
4487 {
4488 /* We are in the weird situation where high0 > high1 but
4489 high1 has no successor. Punt. */
4490 return 0;
4491 }
4492 }
4493 else if (! subset || highequal)
4494 {
4495 low = low0;
4496 high = range_predecessor (low1);
4497 in_p = 1;
4498 if (high == 0)
4499 {
4500 /* low0 < low1 but low1 has no predecessor. Punt. */
4501 return 0;
4502 }
4503 }
4504 else
4505 return 0;
4506 }
4507
4508 else if (! in0_p && in1_p)
4509 {
4510 /* If they don't overlap, the result is the second range. If the second
4511 is a subset of the first, the result is false. Otherwise,
4512 the range starts just after the first range and ends at the
4513 end of the second. */
4514 if (no_overlap)
4515 in_p = 1, low = low1, high = high1;
4516 else if (subset || highequal)
4517 in_p = 0, low = high = 0;
4518 else
4519 {
4520 low = range_successor (high0);
4521 high = high1;
4522 in_p = 1;
4523 if (low == 0)
4524 {
4525 /* high1 > high0 but high0 has no successor. Punt. */
4526 return 0;
4527 }
4528 }
4529 }
4530
4531 else
4532 {
4533 /* The case where we are excluding both ranges. Here the complex case
4534 is if they don't overlap. In that case, the only time we have a
4535 range is if they are adjacent. If the second is a subset of the
4536 first, the result is the first. Otherwise, the range to exclude
4537 starts at the beginning of the first range and ends at the end of the
4538 second. */
4539 if (no_overlap)
4540 {
4541 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4542 range_successor (high0),
4543 1, low1, 0)))
4544 in_p = 0, low = low0, high = high1;
4545 else
4546 {
4547 /* Canonicalize - [min, x] into - [-, x]. */
4548 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4549 switch (TREE_CODE (TREE_TYPE (low0)))
4550 {
4551 case ENUMERAL_TYPE:
4552 if (TYPE_PRECISION (TREE_TYPE (low0))
4553 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4554 break;
4555 /* FALLTHROUGH */
4556 case INTEGER_TYPE:
4557 if (tree_int_cst_equal (low0,
4558 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4559 low0 = 0;
4560 break;
4561 case POINTER_TYPE:
4562 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4563 && integer_zerop (low0))
4564 low0 = 0;
4565 break;
4566 default:
4567 break;
4568 }
4569
4570 /* Canonicalize - [x, max] into - [x, -]. */
4571 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4572 switch (TREE_CODE (TREE_TYPE (high1)))
4573 {
4574 case ENUMERAL_TYPE:
4575 if (TYPE_PRECISION (TREE_TYPE (high1))
4576 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4577 break;
4578 /* FALLTHROUGH */
4579 case INTEGER_TYPE:
4580 if (tree_int_cst_equal (high1,
4581 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4582 high1 = 0;
4583 break;
4584 case POINTER_TYPE:
4585 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4586 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4587 high1, 1,
4588 build_int_cst (TREE_TYPE (high1), 1),
4589 1)))
4590 high1 = 0;
4591 break;
4592 default:
4593 break;
4594 }
4595
4596 /* The ranges might be also adjacent between the maximum and
4597 minimum values of the given type. For
4598 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4599 return + [x + 1, y - 1]. */
4600 if (low0 == 0 && high1 == 0)
4601 {
4602 low = range_successor (high0);
4603 high = range_predecessor (low1);
4604 if (low == 0 || high == 0)
4605 return 0;
4606
4607 in_p = 1;
4608 }
4609 else
4610 return 0;
4611 }
4612 }
4613 else if (subset)
4614 in_p = 0, low = low0, high = high0;
4615 else
4616 in_p = 0, low = low0, high = high1;
4617 }
4618
4619 *pin_p = in_p, *plow = low, *phigh = high;
4620 return 1;
4621 }
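
/* Worked example (added): merging in0_p=1, [2, 9] with in1_p=0, [4, 9],
   i.e. (X in [2, 9]) && !(X in [4, 9]), takes the "in0_p && ! in1_p"
   branch with SUBSET and HIGHEQUAL set, yielding + [2, 3]: the first
   range up to just before the start of the second. */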
4622 \f
4623
4624 /* Subroutine of fold, looking inside expressions of the form
4625 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4626 of the COND_EXPR. This function is being used also to optimize
4627 A op B ? C : A, by reversing the comparison first.
4628
4629 Return a folded expression whose code is not a COND_EXPR
4630 anymore, or NULL_TREE if no folding opportunity is found. */
4631
4632 static tree
4633 fold_cond_expr_with_comparison (location_t loc, tree type,
4634 tree arg0, tree arg1, tree arg2)
4635 {
4636 enum tree_code comp_code = TREE_CODE (arg0);
4637 tree arg00 = TREE_OPERAND (arg0, 0);
4638 tree arg01 = TREE_OPERAND (arg0, 1);
4639 tree arg1_type = TREE_TYPE (arg1);
4640 tree tem;
4641
4642 STRIP_NOPS (arg1);
4643 STRIP_NOPS (arg2);
4644
4645 /* If we have A op 0 ? A : -A, consider applying the following
4646 transformations:
4647
4648 A == 0? A : -A same as -A
4649 A != 0? A : -A same as A
4650 A >= 0? A : -A same as abs (A)
4651 A > 0? A : -A same as abs (A)
4652 A <= 0? A : -A same as -abs (A)
4653 A < 0? A : -A same as -abs (A)
4654
4655 None of these transformations work for modes with signed
4656 zeros. If A is +/-0, the first two transformations will
4657 change the sign of the result (from +0 to -0, or vice
4658 versa). The last four will fix the sign of the result,
4659 even though the original expressions could be positive or
4660 negative, depending on the sign of A.
4661
4662 Note that all these transformations are correct if A is
4663 NaN, since the two alternatives (A and -A) are also NaNs. */
4664 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4665 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4666 ? real_zerop (arg01)
4667 : integer_zerop (arg01))
4668 && ((TREE_CODE (arg2) == NEGATE_EXPR
4669 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4670 /* In the case that A is of the form X-Y, '-A' (arg2) may
4671 have already been folded to Y-X, check for that. */
4672 || (TREE_CODE (arg1) == MINUS_EXPR
4673 && TREE_CODE (arg2) == MINUS_EXPR
4674 && operand_equal_p (TREE_OPERAND (arg1, 0),
4675 TREE_OPERAND (arg2, 1), 0)
4676 && operand_equal_p (TREE_OPERAND (arg1, 1),
4677 TREE_OPERAND (arg2, 0), 0))))
4678 switch (comp_code)
4679 {
4680 case EQ_EXPR:
4681 case UNEQ_EXPR:
4682 tem = fold_convert_loc (loc, arg1_type, arg1);
4683 return pedantic_non_lvalue_loc (loc,
4684 fold_convert_loc (loc, type,
4685 negate_expr (tem)));
4686 case NE_EXPR:
4687 case LTGT_EXPR:
4688 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4689 case UNGE_EXPR:
4690 case UNGT_EXPR:
4691 if (flag_trapping_math)
4692 break;
4693 /* Fall through. */
4694 case GE_EXPR:
4695 case GT_EXPR:
4696 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4697 arg1 = fold_convert_loc (loc, signed_type_for
4698 (TREE_TYPE (arg1)), arg1);
4699 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4700 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4701 case UNLE_EXPR:
4702 case UNLT_EXPR:
4703 if (flag_trapping_math)
4704 break;
      /* Fall through. */
4705 case LE_EXPR:
4706 case LT_EXPR:
4707 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4708 arg1 = fold_convert_loc (loc, signed_type_for
4709 (TREE_TYPE (arg1)), arg1);
4710 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4711 return negate_expr (fold_convert_loc (loc, type, tem));
4712 default:
4713 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4714 break;
4715 }
4716
4717 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4718 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4719 both transformations are correct when A is NaN: A != 0
4720 is then true, and A == 0 is false. */
4721
4722 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4723 && integer_zerop (arg01) && integer_zerop (arg2))
4724 {
4725 if (comp_code == NE_EXPR)
4726 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4727 else if (comp_code == EQ_EXPR)
4728 return build_zero_cst (type);
4729 }
4730
4731 /* Try some transformations of A op B ? A : B.
4732
4733 A == B? A : B same as B
4734 A != B? A : B same as A
4735 A >= B? A : B same as max (A, B)
4736 A > B? A : B same as max (B, A)
4737 A <= B? A : B same as min (A, B)
4738 A < B? A : B same as min (B, A)
4739
4740 As above, these transformations don't work in the presence
4741 of signed zeros. For example, if A and B are zeros of
4742 opposite sign, the first two transformations will change
4743 the sign of the result. In the last four, the original
4744 expressions give different results for (A=+0, B=-0) and
4745 (A=-0, B=+0), but the transformed expressions do not.
4746
4747 The first two transformations are correct if either A or B
4748 is a NaN. In the first transformation, the condition will
4749 be false, and B will indeed be chosen. In the case of the
4750 second transformation, the condition A != B will be true,
4751 and A will be chosen.
4752
4753 The conversions to max() and min() are not correct if B is
4754 a number and A is not. The conditions in the original
4755 expressions will be false, so all four give B. The min()
4756 and max() versions would give a NaN instead. */
4757 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4758 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4759 /* Avoid these transformations if the COND_EXPR may be used
4760 as an lvalue in the C++ front-end. PR c++/19199. */
4761 && (in_gimple_form
4762 || VECTOR_TYPE_P (type)
4763 || (strcmp (lang_hooks.name, "GNU C++") != 0
4764 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4765 || ! maybe_lvalue_p (arg1)
4766 || ! maybe_lvalue_p (arg2)))
4767 {
4768 tree comp_op0 = arg00;
4769 tree comp_op1 = arg01;
4770 tree comp_type = TREE_TYPE (comp_op0);
4771
4772 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4773 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4774 {
4775 comp_type = type;
4776 comp_op0 = arg1;
4777 comp_op1 = arg2;
4778 }
4779
4780 switch (comp_code)
4781 {
4782 case EQ_EXPR:
4783 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4784 case NE_EXPR:
4785 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4786 case LE_EXPR:
4787 case LT_EXPR:
4788 case UNLE_EXPR:
4789 case UNLT_EXPR:
4790 /* In C++ a ?: expression can be an lvalue, so put the
4791 operand which will be used if they are equal first
4792 so that we can convert this back to the
4793 corresponding COND_EXPR. */
4794 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4795 {
4796 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4797 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4798 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4799 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4800 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4801 comp_op1, comp_op0);
4802 return pedantic_non_lvalue_loc (loc,
4803 fold_convert_loc (loc, type, tem));
4804 }
4805 break;
4806 case GE_EXPR:
4807 case GT_EXPR:
4808 case UNGE_EXPR:
4809 case UNGT_EXPR:
4810 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4811 {
4812 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4813 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4814 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4815 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4816 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4817 comp_op1, comp_op0);
4818 return pedantic_non_lvalue_loc (loc,
4819 fold_convert_loc (loc, type, tem));
4820 }
4821 break;
4822 case UNEQ_EXPR:
4823 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4824 return pedantic_non_lvalue_loc (loc,
4825 fold_convert_loc (loc, type, arg2));
4826 break;
4827 case LTGT_EXPR:
4828 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4829 return pedantic_non_lvalue_loc (loc,
4830 fold_convert_loc (loc, type, arg1));
4831 break;
4832 default:
4833 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4834 break;
4835 }
4836 }
4837
4838 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4839 we might still be able to simplify this. For example,
4840 if C1 is one less or one more than C2, this might have started
4841 out as a MIN or MAX and been transformed by this function.
4842 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4843
4844 if (INTEGRAL_TYPE_P (type)
4845 && TREE_CODE (arg01) == INTEGER_CST
4846 && TREE_CODE (arg2) == INTEGER_CST)
4847 switch (comp_code)
4848 {
4849 case EQ_EXPR:
4850 if (TREE_CODE (arg1) == INTEGER_CST)
4851 break;
4852 /* We can replace A with C1 in this case. */
4853 arg1 = fold_convert_loc (loc, type, arg01);
4854 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4855
4856 case LT_EXPR:
4857 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4858 MIN_EXPR, to preserve the signedness of the comparison. */
4859 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4860 OEP_ONLY_CONST)
4861 && operand_equal_p (arg01,
4862 const_binop (PLUS_EXPR, arg2,
4863 build_int_cst (type, 1)),
4864 OEP_ONLY_CONST))
4865 {
4866 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4867 fold_convert_loc (loc, TREE_TYPE (arg00),
4868 arg2));
4869 return pedantic_non_lvalue_loc (loc,
4870 fold_convert_loc (loc, type, tem));
4871 }
4872 break;
4873
4874 case LE_EXPR:
4875 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4876 as above. */
4877 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4878 OEP_ONLY_CONST)
4879 && operand_equal_p (arg01,
4880 const_binop (MINUS_EXPR, arg2,
4881 build_int_cst (type, 1)),
4882 OEP_ONLY_CONST))
4883 {
4884 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4885 fold_convert_loc (loc, TREE_TYPE (arg00),
4886 arg2));
4887 return pedantic_non_lvalue_loc (loc,
4888 fold_convert_loc (loc, type, tem));
4889 }
4890 break;
4891
4892 case GT_EXPR:
4893 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4894 MAX_EXPR, to preserve the signedness of the comparison. */
4895 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4896 OEP_ONLY_CONST)
4897 && operand_equal_p (arg01,
4898 const_binop (MINUS_EXPR, arg2,
4899 build_int_cst (type, 1)),
4900 OEP_ONLY_CONST))
4901 {
4902 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4903 fold_convert_loc (loc, TREE_TYPE (arg00),
4904 arg2));
4905 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4906 }
4907 break;
4908
4909 case GE_EXPR:
4910 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4911 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4912 OEP_ONLY_CONST)
4913 && operand_equal_p (arg01,
4914 const_binop (PLUS_EXPR, arg2,
4915 build_int_cst (type, 1)),
4916 OEP_ONLY_CONST))
4917 {
4918 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4919 fold_convert_loc (loc, TREE_TYPE (arg00),
4920 arg2));
4921 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4922 }
4923 break;
4924 case NE_EXPR:
4925 break;
4926 default:
4927 gcc_unreachable ();
4928 }
4929
4930 return NULL_TREE;
4931 }
4932
4933
4934 \f
4935 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4936 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4937 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4938 false) >= 2)
4939 #endif
4940
4941 /* EXP is some logical combination of boolean tests. See if we can
4942 merge it into some range test. Return the new tree if so. */
4943
4944 static tree
4945 fold_range_test (location_t loc, enum tree_code code, tree type,
4946 tree op0, tree op1)
4947 {
4948 int or_op = (code == TRUTH_ORIF_EXPR
4949 || code == TRUTH_OR_EXPR);
4950 int in0_p, in1_p, in_p;
4951 tree low0, low1, low, high0, high1, high;
4952 bool strict_overflow_p = false;
4953 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4954 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4955 tree tem;
4956 const char * const warnmsg = G_("assuming signed overflow does not occur "
4957 "when simplifying range test");
4958
4959 /* If this is an OR operation, invert both sides; we will invert
4960 again at the end. */
4961 if (or_op)
4962 in0_p = ! in0_p, in1_p = ! in1_p;
4963
4964 /* If both expressions are the same, if we can merge the ranges, and we
4965 can build the range test, return it or it inverted. If one of the
4966 ranges is always true or always false, consider it to be the same
4967 expression as the other. */
4968 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4969 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4970 in1_p, low1, high1)
4971 && 0 != (tem = (build_range_check (loc, type,
4972 lhs != 0 ? lhs
4973 : rhs != 0 ? rhs : integer_zero_node,
4974 in_p, low, high))))
4975 {
4976 if (strict_overflow_p)
4977 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4978 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4979 }
4980
4981 /* On machines where the branch cost is expensive, if this is a
4982 short-circuited branch and the underlying object on both sides
4983 is the same, make a non-short-circuit operation. */
4984 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4985 && lhs != 0 && rhs != 0
4986 && (code == TRUTH_ANDIF_EXPR
4987 || code == TRUTH_ORIF_EXPR)
4988 && operand_equal_p (lhs, rhs, 0))
4989 {
4990 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4991 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4992 which cases we can't do this. */
4993 if (simple_operand_p (lhs))
4994 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4995 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4996 type, op0, op1);
4997
4998 else if (!lang_hooks.decls.global_bindings_p ()
4999 && !CONTAINS_PLACEHOLDER_P (lhs))
5000 {
5001 tree common = save_expr (lhs);
5002
5003 if (0 != (lhs = build_range_check (loc, type, common,
5004 or_op ? ! in0_p : in0_p,
5005 low0, high0))
5006 && (0 != (rhs = build_range_check (loc, type, common,
5007 or_op ? ! in1_p : in1_p,
5008 low1, high1))))
5009 {
5010 if (strict_overflow_p)
5011 fold_overflow_warning (warnmsg,
5012 WARN_STRICT_OVERFLOW_COMPARISON);
5013 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5014 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5015 type, lhs, rhs);
5016 }
5017 }
5018 }
5019
5020 return 0;
5021 }
5022 \f
5023 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5024 bit value. Arrange things so the extra bits will be set to zero if and
5025 only if C is sign-extended to its full width. If MASK is nonzero,
5026 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5027
5028 static tree
5029 unextend (tree c, int p, int unsignedp, tree mask)
5030 {
5031 tree type = TREE_TYPE (c);
5032 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5033 tree temp;
5034
5035 if (p == modesize || unsignedp)
5036 return c;
5037
5038 /* We work by getting just the sign bit into the low-order bit, then
5039 into the high-order bit, then sign-extend. We then XOR that value
5040 with C. */
5041 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5042
5043 /* We must use a signed type in order to get an arithmetic right shift.
5044 However, we must also avoid introducing accidental overflows, so that
5045 a subsequent call to integer_zerop will work. Hence we must
5046 do the type conversion here. At this point, the constant is either
5047 zero or one, and the conversion to a signed type can never overflow.
5048 We could get an overflow if this conversion is done anywhere else. */
5049 if (TYPE_UNSIGNED (type))
5050 temp = fold_convert (signed_type_for (type), temp);
5051
5052 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5053 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5054 if (mask != 0)
5055 temp = const_binop (BIT_AND_EXPR, temp,
5056 fold_convert (TREE_TYPE (c), mask));
5057 /* If necessary, convert the type back to match the type of C. */
5058 if (TYPE_UNSIGNED (type))
5059 temp = fold_convert (type, temp);
5060
5061 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5062 }
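
/* A rough C model of the shift/XOR trick above (added sketch,
   illustrative only; the MASK step and the type conversions are
   omitted): the high bits of the result are zero exactly when C was
   the sign-extension of its low P bits. */
#if 0
#include <stdint.h>
static uint32_t
unextend_sketch (uint32_t c, int p) /* 1 <= p < 32 */
{
  /* Spread the field's sign bit over bits P..31, then XOR: a
     correctly sign-extended C has those bits equal to the sign bit,
     so they cancel to zero. */
  uint32_t sign = (c >> (p - 1)) & 1;
  uint32_t ext = sign ? ~(uint32_t) 0 << p : 0;
  return c ^ ext;
}
#endif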
5063 \f
5064 /* For an expression that has the form
5065 (A && B) || ~B
5066 or
5067 (A || B) && ~B,
5068 we can drop one of the inner expressions and simplify to
5069 A || ~B
5070 or
5071 A && ~B
5072 LOC is the location of the resulting expression. OP is the inner
5073 logical operation; the left-hand side in the examples above, while CMPOP
5074 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5075 removing a condition that guards another, as in
5076 (A != NULL && A->...) || A == NULL
5077 which we must not transform. If RHS_ONLY is true, only eliminate the
5078 right-most operand of the inner logical operation. */
5079
5080 static tree
5081 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5082 bool rhs_only)
5083 {
5084 tree type = TREE_TYPE (cmpop);
5085 enum tree_code code = TREE_CODE (cmpop);
5086 enum tree_code truthop_code = TREE_CODE (op);
5087 tree lhs = TREE_OPERAND (op, 0);
5088 tree rhs = TREE_OPERAND (op, 1);
5089 tree orig_lhs = lhs, orig_rhs = rhs;
5090 enum tree_code rhs_code = TREE_CODE (rhs);
5091 enum tree_code lhs_code = TREE_CODE (lhs);
5092 enum tree_code inv_code;
5093
5094 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5095 return NULL_TREE;
5096
5097 if (TREE_CODE_CLASS (code) != tcc_comparison)
5098 return NULL_TREE;
5099
5100 if (rhs_code == truthop_code)
5101 {
5102 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5103 if (newrhs != NULL_TREE)
5104 {
5105 rhs = newrhs;
5106 rhs_code = TREE_CODE (rhs);
5107 }
5108 }
5109 if (lhs_code == truthop_code && !rhs_only)
5110 {
5111 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5112 if (newlhs != NULL_TREE)
5113 {
5114 lhs = newlhs;
5115 lhs_code = TREE_CODE (lhs);
5116 }
5117 }
5118
5119 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5120 if (inv_code == rhs_code
5121 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5122 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5123 return lhs;
5124 if (!rhs_only && inv_code == lhs_code
5125 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5126 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5127 return rhs;
5128 if (rhs != orig_rhs || lhs != orig_lhs)
5129 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5130 lhs, rhs);
5131 return NULL_TREE;
5132 }
5133
5134 /* Find ways of folding logical expressions of LHS and RHS:
5135 Try to merge two comparisons to the same innermost item.
5136 Look for range tests like "ch >= '0' && ch <= '9'".
5137 Look for combinations of simple terms on machines with expensive branches
5138 and evaluate the RHS unconditionally.
5139
5140 For example, if we have p->a == 2 && p->b == 4 and we can make an
5141 object large enough to span both A and B, we can do this with a comparison
5142 against the object ANDed with the a mask.
5143
5144 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5145 operations to do this with one comparison.
5146
5147 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5148 function and the one above.
5149
5150 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5151 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5152
5153 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5154 two operands.
5155
5156 We return the simplified tree or 0 if no optimization is possible. */
5157
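/* Worked example (added sketch): given
   struct s { unsigned a : 4; unsigned b : 4; } *p;
   the test p->a == 2 && p->b == 4 can load the containing byte once
   and become (load & 0xff) == 0x42 on a target that allocates bit
   fields from the low end (the masks, the merged constant and the
   load width all depend on bit order and padding; this is
   illustrative only). */
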
5158 static tree
5159 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5160 tree lhs, tree rhs)
5161 {
5162 /* If this is the "or" of two comparisons, we can do something if
5163 the comparisons are NE_EXPR. If this is the "and", we can do something
5164 if the comparisons are EQ_EXPR. I.e.,
5165 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5166
5167 WANTED_CODE is this operation code. For single bit fields, we can
5168 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5169 comparison for one-bit fields. */
5170
5171 enum tree_code wanted_code;
5172 enum tree_code lcode, rcode;
5173 tree ll_arg, lr_arg, rl_arg, rr_arg;
5174 tree ll_inner, lr_inner, rl_inner, rr_inner;
5175 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5176 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5177 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5178 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5179 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5180 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5181 enum machine_mode lnmode, rnmode;
5182 tree ll_mask, lr_mask, rl_mask, rr_mask;
5183 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5184 tree l_const, r_const;
5185 tree lntype, rntype, result;
5186 HOST_WIDE_INT first_bit, end_bit;
5187 int volatilep;
5188
5189 /* Start by getting the comparison codes. Fail if anything is volatile.
5190 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5191 it were surrounded with a NE_EXPR. */
5192
5193 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5194 return 0;
5195
5196 lcode = TREE_CODE (lhs);
5197 rcode = TREE_CODE (rhs);
5198
5199 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5200 {
5201 lhs = build2 (NE_EXPR, truth_type, lhs,
5202 build_int_cst (TREE_TYPE (lhs), 0));
5203 lcode = NE_EXPR;
5204 }
5205
5206 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5207 {
5208 rhs = build2 (NE_EXPR, truth_type, rhs,
5209 build_int_cst (TREE_TYPE (rhs), 0));
5210 rcode = NE_EXPR;
5211 }
5212
5213 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5214 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5215 return 0;
5216
5217 ll_arg = TREE_OPERAND (lhs, 0);
5218 lr_arg = TREE_OPERAND (lhs, 1);
5219 rl_arg = TREE_OPERAND (rhs, 0);
5220 rr_arg = TREE_OPERAND (rhs, 1);
5221
5222 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5223 if (simple_operand_p (ll_arg)
5224 && simple_operand_p (lr_arg))
5225 {
5226 if (operand_equal_p (ll_arg, rl_arg, 0)
5227 && operand_equal_p (lr_arg, rr_arg, 0))
5228 {
5229 result = combine_comparisons (loc, code, lcode, rcode,
5230 truth_type, ll_arg, lr_arg);
5231 if (result)
5232 return result;
5233 }
5234 else if (operand_equal_p (ll_arg, rr_arg, 0)
5235 && operand_equal_p (lr_arg, rl_arg, 0))
5236 {
5237 result = combine_comparisons (loc, code, lcode,
5238 swap_tree_comparison (rcode),
5239 truth_type, ll_arg, lr_arg);
5240 if (result)
5241 return result;
5242 }
5243 }
5244
5245 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5246 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5247
5248 /* If the RHS can be evaluated unconditionally and its operands are
5249 simple, it wins to evaluate the RHS unconditionally on machines
5250 with expensive branches. In this case, this isn't a comparison
5251 that can be merged. */
5252
5253 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5254 false) >= 2
5255 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5256 && simple_operand_p (rl_arg)
5257 && simple_operand_p (rr_arg))
5258 {
5259 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5260 if (code == TRUTH_OR_EXPR
5261 && lcode == NE_EXPR && integer_zerop (lr_arg)
5262 && rcode == NE_EXPR && integer_zerop (rr_arg)
5263 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5264 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5265 return build2_loc (loc, NE_EXPR, truth_type,
5266 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5267 ll_arg, rl_arg),
5268 build_int_cst (TREE_TYPE (ll_arg), 0));
5269
5270 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5271 if (code == TRUTH_AND_EXPR
5272 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5273 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5274 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5275 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5276 return build2_loc (loc, EQ_EXPR, truth_type,
5277 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5278 ll_arg, rl_arg),
5279 build_int_cst (TREE_TYPE (ll_arg), 0));
5280 }
5281
5282 /* See if the comparisons can be merged. Then get all the parameters for
5283 each side. */
5284
5285 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5286 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5287 return 0;
5288
5289 volatilep = 0;
5290 ll_inner = decode_field_reference (loc, ll_arg,
5291 &ll_bitsize, &ll_bitpos, &ll_mode,
5292 &ll_unsignedp, &volatilep, &ll_mask,
5293 &ll_and_mask);
5294 lr_inner = decode_field_reference (loc, lr_arg,
5295 &lr_bitsize, &lr_bitpos, &lr_mode,
5296 &lr_unsignedp, &volatilep, &lr_mask,
5297 &lr_and_mask);
5298 rl_inner = decode_field_reference (loc, rl_arg,
5299 &rl_bitsize, &rl_bitpos, &rl_mode,
5300 &rl_unsignedp, &volatilep, &rl_mask,
5301 &rl_and_mask);
5302 rr_inner = decode_field_reference (loc, rr_arg,
5303 &rr_bitsize, &rr_bitpos, &rr_mode,
5304 &rr_unsignedp, &volatilep, &rr_mask,
5305 &rr_and_mask);
5306
5307 /* The inner operation on the lhs of each comparison must be the
5308 same if we are to be able to do anything. Then see if we have
5309 constants. If not, the same must be true for
5310 the rhs's. */
5311 if (volatilep || ll_inner == 0 || rl_inner == 0
5312 || ! operand_equal_p (ll_inner, rl_inner, 0))
5313 return 0;
5314
5315 if (TREE_CODE (lr_arg) == INTEGER_CST
5316 && TREE_CODE (rr_arg) == INTEGER_CST)
5317 l_const = lr_arg, r_const = rr_arg;
5318 else if (lr_inner == 0 || rr_inner == 0
5319 || ! operand_equal_p (lr_inner, rr_inner, 0))
5320 return 0;
5321 else
5322 l_const = r_const = 0;
5323
5324 /* If either comparison code is not correct for our logical operation,
5325 fail. However, we can convert a one-bit comparison against zero into
5326 the opposite comparison against that bit being set in the field. */
5327
5328 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5329 if (lcode != wanted_code)
5330 {
5331 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5332 {
5333 /* Make the left operand unsigned, since we are only interested
5334 in the value of one bit. Otherwise we are doing the wrong
5335 thing below. */
5336 ll_unsignedp = 1;
5337 l_const = ll_mask;
5338 }
5339 else
5340 return 0;
5341 }
5342
5343 /* This is analogous to the code for l_const above. */
5344 if (rcode != wanted_code)
5345 {
5346 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5347 {
5348 rl_unsignedp = 1;
5349 r_const = rl_mask;
5350 }
5351 else
5352 return 0;
5353 }
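   /* E.g. for a single-bit field B, the tests B == 0 and B != 1 are
      equivalent, as are B != 0 and B == 1; that equivalence is what
      lets the two blocks above flip a mismatched comparison code by
      comparing against the bit value instead of zero.  */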
5354
5355 /* See if we can find a mode that contains both fields being compared on
5356 the left. If we can't, fail. Otherwise, update all constants and masks
5357 to be relative to a field of that size. */
5358 first_bit = MIN (ll_bitpos, rl_bitpos);
5359 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5360 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5361 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5362 volatilep);
5363 if (lnmode == VOIDmode)
5364 return 0;
5365
5366 lnbitsize = GET_MODE_BITSIZE (lnmode);
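   /* The next statement rounds FIRST_BIT down to a boundary of the
      chosen mode; e.g. with lnbitsize == 32 and first_bit == 37,
      lnbitpos becomes 32 (numbers illustrative).  */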
5367 lnbitpos = first_bit & ~ (lnbitsize - 1);
5368 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5369 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5370
5371 if (BYTES_BIG_ENDIAN)
5372 {
5373 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5374 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5375 }
5376
5377 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5378 size_int (xll_bitpos));
5379 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5380 size_int (xrl_bitpos));
5381
5382 if (l_const)
5383 {
5384 l_const = fold_convert_loc (loc, lntype, l_const);
5385 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5386 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5387 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5388 fold_build1_loc (loc, BIT_NOT_EXPR,
5389 lntype, ll_mask))))
5390 {
5391 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5392
5393 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5394 }
5395 }
5396 if (r_const)
5397 {
5398 r_const = fold_convert_loc (loc, lntype, r_const);
5399 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5400 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5401 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5402 fold_build1_loc (loc, BIT_NOT_EXPR,
5403 lntype, rl_mask))))
5404 {
5405 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5406
5407 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5408 }
5409 }
5410
5411 /* If the right sides are not constant, do the same for them. Also,
5412 disallow this optimization if a size or signedness mismatch occurs
5413 between the left and right sides. */
5414 if (l_const == 0)
5415 {
5416 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5417 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5418 /* Make sure the two fields on the right
5419 correspond to the left without being swapped. */
5420 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5421 return 0;
5422
5423 first_bit = MIN (lr_bitpos, rr_bitpos);
5424 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5425 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5426 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5427 volatilep);
5428 if (rnmode == VOIDmode)
5429 return 0;
5430
5431 rnbitsize = GET_MODE_BITSIZE (rnmode);
5432 rnbitpos = first_bit & ~ (rnbitsize - 1);
5433 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5434 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5435
5436 if (BYTES_BIG_ENDIAN)
5437 {
5438 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5439 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5440 }
5441
5442 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5443 rntype, lr_mask),
5444 size_int (xlr_bitpos));
5445 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5446 rntype, rr_mask),
5447 size_int (xrr_bitpos));
5448
5449 /* Make a mask that corresponds to both fields being compared.
5450 Do this for both items being compared. If the operands are the
5451 same size and the bits being compared are in the same position
5452 then we can do this by masking both and comparing the masked
5453 results. */
5454 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5455 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5456 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5457 {
5458 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5459 ll_unsignedp || rl_unsignedp);
5460 if (! all_ones_mask_p (ll_mask, lnbitsize))
5461 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5462
5463 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5464 lr_unsignedp || rr_unsignedp);
5465 if (! all_ones_mask_p (lr_mask, rnbitsize))
5466 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5467
5468 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5469 }
5470
5471 /* There is still another way we can do something: If both pairs of
5472 fields being compared are adjacent, we may be able to make a wider
5473 field containing them both.
5474
5475 Note that we still must mask the lhs/rhs expressions. Furthermore,
5476 the mask must be shifted to account for the shift done by
5477 make_bit_field_ref. */
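	 /* E.g. two adjacent 8-bit fields at bit positions 0 and 8 can
	    be fetched as one 16-bit field at position 0 and compared in
	    a single operation (sizes illustrative).  */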
5478 if ((ll_bitsize + ll_bitpos == rl_bitpos
5479 && lr_bitsize + lr_bitpos == rr_bitpos)
5480 || (ll_bitpos == rl_bitpos + rl_bitsize
5481 && lr_bitpos == rr_bitpos + rr_bitsize))
5482 {
5483 tree type;
5484
5485 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5486 ll_bitsize + rl_bitsize,
5487 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5488 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5489 lr_bitsize + rr_bitsize,
5490 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5491
5492 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5493 size_int (MIN (xll_bitpos, xrl_bitpos)));
5494 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5495 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5496
5497 /* Convert to the smaller type before masking out unwanted bits. */
5498 type = lntype;
5499 if (lntype != rntype)
5500 {
5501 if (lnbitsize > rnbitsize)
5502 {
5503 lhs = fold_convert_loc (loc, rntype, lhs);
5504 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5505 type = rntype;
5506 }
5507 else if (lnbitsize < rnbitsize)
5508 {
5509 rhs = fold_convert_loc (loc, lntype, rhs);
5510 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5511 type = lntype;
5512 }
5513 }
5514
5515 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5516 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5517
5518 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5519 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5520
5521 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5522 }
5523
5524 return 0;
5525 }
5526
5527 /* Handle the case of comparisons with constants. If there is something in
5528 common between the masks, those bits of the constants must be the same.
5529 If not, the condition is always false. Test for this to avoid generating
5530 incorrect code below. */
5531 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5532 if (! integer_zerop (result)
5533 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5534 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5535 {
5536 if (wanted_code == NE_EXPR)
5537 {
5538 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5539 return constant_boolean_node (true, truth_type);
5540 }
5541 else
5542 {
5543 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5544 return constant_boolean_node (false, truth_type);
5545 }
5546 }
5547
5548 /* Construct the expression we will return. First get the component
5549 reference we will make. Unless the mask is all ones the width of
5550 that field, perform the mask operation. Then compare with the
5551 merged constant. */
5552 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5553 ll_unsignedp || rl_unsignedp);
5554
5555 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5556 if (! all_ones_mask_p (ll_mask, lnbitsize))
5557 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5558
5559 return build2_loc (loc, wanted_code, truth_type, result,
5560 const_binop (BIT_IOR_EXPR, l_const, r_const));
5561 }
5562 \f
5563 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5564 constant. */
5565
5566 static tree
5567 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5568 tree op0, tree op1)
5569 {
5570 tree arg0 = op0;
5571 enum tree_code op_code;
5572 tree comp_const;
5573 tree minmax_const;
5574 int consts_equal, consts_lt;
5575 tree inner;
5576
5577 STRIP_SIGN_NOPS (arg0);
5578
5579 op_code = TREE_CODE (arg0);
5580 minmax_const = TREE_OPERAND (arg0, 1);
5581 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5582 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5583 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5584 inner = TREE_OPERAND (arg0, 0);
5585
5586 /* If something does not permit us to optimize, return NULL_TREE. */
5587 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5588 || TREE_CODE (comp_const) != INTEGER_CST
5589 || TREE_OVERFLOW (comp_const)
5590 || TREE_CODE (minmax_const) != INTEGER_CST
5591 || TREE_OVERFLOW (minmax_const))
5592 return NULL_TREE;
5593
5594 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5595 and GT_EXPR, doing the rest with recursive calls using logical
5596 simplifications. */
5597 switch (code)
5598 {
5599 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5600 {
5601 tree tem
5602 = optimize_minmax_comparison (loc,
5603 invert_tree_comparison (code, false),
5604 type, op0, op1);
5605 if (tem)
5606 return invert_truthvalue_loc (loc, tem);
5607 return NULL_TREE;
5608 }
5609
5610 case GE_EXPR:
5611 return
5612 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5613 optimize_minmax_comparison
5614 (loc, EQ_EXPR, type, arg0, comp_const),
5615 optimize_minmax_comparison
5616 (loc, GT_EXPR, type, arg0, comp_const));
5617
5618 case EQ_EXPR:
5619 if (op_code == MAX_EXPR && consts_equal)
5620 /* MAX (X, 0) == 0 -> X <= 0 */
5621 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5622
5623 else if (op_code == MAX_EXPR && consts_lt)
5624 /* MAX (X, 0) == 5 -> X == 5 */
5625 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5626
5627 else if (op_code == MAX_EXPR)
5628 /* MAX (X, 0) == -1 -> false */
5629 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5630
5631 else if (consts_equal)
5632 /* MIN (X, 0) == 0 -> X >= 0 */
5633 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5634
5635 else if (consts_lt)
5636 /* MIN (X, 0) == 5 -> false */
5637 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5638
5639 else
5640 /* MIN (X, 0) == -1 -> X == -1 */
5641 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5642
5643 case GT_EXPR:
5644 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5645 /* MAX (X, 0) > 0 -> X > 0
5646 MAX (X, 0) > 5 -> X > 5 */
5647 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5648
5649 else if (op_code == MAX_EXPR)
5650 /* MAX (X, 0) > -1 -> true */
5651 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5652
5653 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5654 /* MIN (X, 0) > 0 -> false
5655 MIN (X, 0) > 5 -> false */
5656 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5657
5658 else
5659 /* MIN (X, 0) > -1 -> X > -1 */
5660 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5661
5662 default:
5663 return NULL_TREE;
5664 }
5665 }
5666 \f
5667 /* T is an integer expression that is being multiplied, divided, or taken a
5668 modulus (CODE says which and what kind of divide or modulus) by a
5669 constant C. See if we can eliminate that operation by folding it with
5670 other operations already in T. WIDE_TYPE, if non-null, is a type that
5671 should be used for the computation if wider than our type.
5672
5673 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5674 (X * 2) + (Y * 4). We must, however, be assured that either the original
5675 expression would not overflow or that overflow is undefined for the type
5676 in the language in question.
5677
5678 If we return a non-null expression, it is an equivalent form of the
5679 original computation, but need not be in the original type.
5680
5681 We set *STRICT_OVERFLOW_P to true if the return value depends on
5682 signed overflow being undefined. Otherwise we do not change
5683 *STRICT_OVERFLOW_P. */
5684
5685 static tree
5686 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5687 bool *strict_overflow_p)
5688 {
5689 /* To avoid exponential search depth, refuse to allow recursion past
5690 three levels. Beyond that (1) it's highly unlikely that we'll find
5691 something interesting and (2) we've probably processed it before
5692 when we built the inner expression. */
5693
5694 static int depth;
5695 tree ret;
5696
5697 if (depth > 3)
5698 return NULL;
5699
5700 depth++;
5701 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5702 depth--;
5703
5704 return ret;
5705 }
5706
5707 static tree
5708 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5709 bool *strict_overflow_p)
5710 {
5711 tree type = TREE_TYPE (t);
5712 enum tree_code tcode = TREE_CODE (t);
5713 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5714 > GET_MODE_SIZE (TYPE_MODE (type)))
5715 ? wide_type : type);
5716 tree t1, t2;
5717 int same_p = tcode == code;
5718 tree op0 = NULL_TREE, op1 = NULL_TREE;
5719 bool sub_strict_overflow_p;
5720
5721 /* Don't deal with constants of zero here; they confuse the code below. */
5722 if (integer_zerop (c))
5723 return NULL_TREE;
5724
5725 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5726 op0 = TREE_OPERAND (t, 0);
5727
5728 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5729 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5730
5731 /* Note that we need not handle conditional operations here since fold
5732 already handles those cases. So just do arithmetic here. */
5733 switch (tcode)
5734 {
5735 case INTEGER_CST:
5736 /* For a constant, we can always simplify if we are a multiply
5737 or (for divide and modulus) if it is a multiple of our constant. */
5738 if (code == MULT_EXPR
5739 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5740 return const_binop (code, fold_convert (ctype, t),
5741 fold_convert (ctype, c));
5742 break;
5743
5744 CASE_CONVERT: case NON_LVALUE_EXPR:
5745 /* If op0 is an expression ... */
5746 if ((COMPARISON_CLASS_P (op0)
5747 || UNARY_CLASS_P (op0)
5748 || BINARY_CLASS_P (op0)
5749 || VL_EXP_CLASS_P (op0)
5750 || EXPRESSION_CLASS_P (op0))
5751 /* ... and has wrapping overflow, and its type is smaller
5752 than ctype, then we cannot pass through as widening. */
5753 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5754 && (TYPE_PRECISION (ctype)
5755 > TYPE_PRECISION (TREE_TYPE (op0))))
5756 /* ... or this is a truncation (t is narrower than op0),
5757 then we cannot pass through this narrowing. */
5758 || (TYPE_PRECISION (type)
5759 < TYPE_PRECISION (TREE_TYPE (op0)))
5760 /* ... or signedness changes for division or modulus,
5761 then we cannot pass through this conversion. */
5762 || (code != MULT_EXPR
5763 && (TYPE_UNSIGNED (ctype)
5764 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5765 /* ... or has undefined overflow while the converted to
5766 type has not, we cannot do the operation in the inner type
5767 as that would introduce undefined overflow. */
5768 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5769 && !TYPE_OVERFLOW_UNDEFINED (type))))
5770 break;
5771
5772 /* Pass the constant down and see if we can make a simplification. If
5773 we can, replace this expression with the inner simplification for
5774 possible later conversion to our or some other type. */
5775 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5776 && TREE_CODE (t2) == INTEGER_CST
5777 && !TREE_OVERFLOW (t2)
5778 && (0 != (t1 = extract_muldiv (op0, t2, code,
5779 code == MULT_EXPR
5780 ? ctype : NULL_TREE,
5781 strict_overflow_p))))
5782 return t1;
5783 break;
5784
5785 case ABS_EXPR:
5786 /* If widening the type changes it from signed to unsigned, then we
5787 must avoid building ABS_EXPR itself as unsigned. */
5788 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5789 {
5790 tree cstype = (*signed_type_for) (ctype);
5791 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5792 != 0)
5793 {
5794 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5795 return fold_convert (ctype, t1);
5796 }
5797 break;
5798 }
5799 /* If the constant is negative, we cannot simplify this. */
5800 if (tree_int_cst_sgn (c) == -1)
5801 break;
5802 /* FALLTHROUGH */
5803 case NEGATE_EXPR:
5804 /* For division and modulus, type can't be unsigned, as e.g.
5805 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5806 For signed types, even with wrapping overflow, this is fine. */
5807 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5808 break;
5809 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5810 != 0)
5811 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5812 break;
5813
5814 case MIN_EXPR: case MAX_EXPR:
5815 /* If widening the type changes the signedness, then we can't perform
5816 this optimization as that changes the result. */
5817 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5818 break;
5819
5820 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5821 sub_strict_overflow_p = false;
5822 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5823 &sub_strict_overflow_p)) != 0
5824 && (t2 = extract_muldiv (op1, c, code, wide_type,
5825 &sub_strict_overflow_p)) != 0)
5826 {
5827 if (tree_int_cst_sgn (c) < 0)
5828 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5829 if (sub_strict_overflow_p)
5830 *strict_overflow_p = true;
5831 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5832 fold_convert (ctype, t2));
5833 }
5834 break;
5835
5836 case LSHIFT_EXPR: case RSHIFT_EXPR:
5837 /* If the second operand is constant, this is a multiplication
5838 or floor division by a power of two, so we can treat it that
5839 way unless the multiplier or divisor overflows. Signed
5840 left-shift overflow is implementation-defined rather than
5841 undefined in C90, so do not convert signed left shift into
5842 multiplication. */
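      /* E.g. an unsigned X << 3 is handled below as X * 8, and X >> 3
	 as the floor division X / 8, so the shift can then be combined
	 with the surrounding multiplication or division by C.  */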
5843 if (TREE_CODE (op1) == INTEGER_CST
5844 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5845 /* const_binop may not detect overflow correctly,
5846 so check for it explicitly here. */
5847 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5848 && 0 != (t1 = fold_convert (ctype,
5849 const_binop (LSHIFT_EXPR,
5850 size_one_node,
5851 op1)))
5852 && !TREE_OVERFLOW (t1))
5853 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5854 ? MULT_EXPR : FLOOR_DIV_EXPR,
5855 ctype,
5856 fold_convert (ctype, op0),
5857 t1),
5858 c, code, wide_type, strict_overflow_p);
5859 break;
5860
5861 case PLUS_EXPR: case MINUS_EXPR:
5862 /* See if we can eliminate the operation on both sides. If we can, we
5863 can return a new PLUS or MINUS. If we can't, the only remaining
5864 cases where we can do anything are if the second operand is a
5865 constant. */
5866 sub_strict_overflow_p = false;
5867 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5868 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5869 if (t1 != 0 && t2 != 0
5870 && (code == MULT_EXPR
5871 /* If not multiplication, we can only do this if both operands
5872 are divisible by c. */
5873 || (multiple_of_p (ctype, op0, c)
5874 && multiple_of_p (ctype, op1, c))))
5875 {
5876 if (sub_strict_overflow_p)
5877 *strict_overflow_p = true;
5878 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5879 fold_convert (ctype, t2));
5880 }
5881
5882 /* If this was a subtraction, negate OP1 and set it to be an addition.
5883 This simplifies the logic below. */
5884 if (tcode == MINUS_EXPR)
5885 {
5886 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5887 /* If OP1 was not easily negatable, the constant may be OP0. */
5888 if (TREE_CODE (op0) == INTEGER_CST)
5889 {
5890 tree tem = op0;
5891 op0 = op1;
5892 op1 = tem;
5893 tem = t1;
5894 t1 = t2;
5895 t2 = tem;
5896 }
5897 }
5898
5899 if (TREE_CODE (op1) != INTEGER_CST)
5900 break;
5901
5902 /* If either OP1 or C are negative, this optimization is not safe for
5903 some of the division and remainder types while for others we need
5904 to change the code. */
5905 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5906 {
5907 if (code == CEIL_DIV_EXPR)
5908 code = FLOOR_DIV_EXPR;
5909 else if (code == FLOOR_DIV_EXPR)
5910 code = CEIL_DIV_EXPR;
5911 else if (code != MULT_EXPR
5912 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5913 break;
5914 }
5915
5916 /* If it's a multiply or a division/modulus operation of a multiple
5917 of our constant, do the operation and verify it doesn't overflow. */
5918 if (code == MULT_EXPR
5919 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5920 {
5921 op1 = const_binop (code, fold_convert (ctype, op1),
5922 fold_convert (ctype, c));
5923 /* We allow the constant to overflow with wrapping semantics. */
5924 if (op1 == 0
5925 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5926 break;
5927 }
5928 else
5929 break;
5930
5931 /* If we have an unsigned type, we cannot widen the operation since it
5932 will change the result if the original computation overflowed. */
5933 if (TYPE_UNSIGNED (ctype) && ctype != type)
5934 break;
5935
5936 /* If we were able to eliminate our operation from the first side,
5937 apply our operation to the second side and reform the PLUS. */
5938 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5939 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5940
5941 /* The last case is if we are a multiply. In that case, we can
5942 apply the distributive law to commute the multiply and addition
5943 if the multiplication of the constants doesn't overflow
5944 and overflow is defined. With undefined overflow
5945 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5946 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5947 return fold_build2 (tcode, ctype,
5948 fold_build2 (code, ctype,
5949 fold_convert (ctype, op0),
5950 fold_convert (ctype, c)),
5951 op1);
5952
5953 break;
5954
5955 case MULT_EXPR:
5956 /* We have a special case here if we are doing something like
5957 (C * 8) % 4 since we know that's zero. */
5958 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5959 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5960 /* If the multiplication can overflow we cannot optimize this. */
5961 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5962 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5963 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5964 {
5965 *strict_overflow_p = true;
5966 return omit_one_operand (type, integer_zero_node, op0);
5967 }
5968
5969 /* ... fall through ... */
5970
5971 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5972 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5973 /* If we can extract our operation from the LHS, do so and return a
5974 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5975 do something only if the second operand is a constant. */
5976 if (same_p
5977 && (t1 = extract_muldiv (op0, c, code, wide_type,
5978 strict_overflow_p)) != 0)
5979 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5980 fold_convert (ctype, op1));
5981 else if (tcode == MULT_EXPR && code == MULT_EXPR
5982 && (t1 = extract_muldiv (op1, c, code, wide_type,
5983 strict_overflow_p)) != 0)
5984 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5985 fold_convert (ctype, t1));
5986 else if (TREE_CODE (op1) != INTEGER_CST)
5987 return 0;
5988
5989 /* If these are the same operation types, we can associate them
5990 assuming no overflow. */
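      /* E.g. (X * 4) * 6 becomes X * 24 when 24 fits in the type
	 without overflow, and likewise (X / 4) / 6 becomes X / 24
	 (constants illustrative).  */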
5991 if (tcode == code)
5992 {
5993 wide_int mul;
5994 bool overflow_p;
5995 signop sign = TYPE_SIGN (ctype);
5996 mul = wi::mul_full (op1, c, sign);
5997 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5998 if (!wi::fits_to_tree_p (mul, ctype)
5999 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6000 overflow_p = true;
6001 if (!overflow_p)
6002 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6003 wide_int_to_tree (ctype, mul));
6004 }
6005
6006 /* If these operations "cancel" each other, we have the main
6007 optimizations of this pass, which occur when either constant is a
6008 multiple of the other, in which case we replace this with an
6009 operation of either CODE or TCODE.
6010
6011 If we have an unsigned type, we cannot do this since it will change
6012 the result if the original computation overflowed. */
6013 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6014 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6015 || (tcode == MULT_EXPR
6016 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6017 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6018 && code != MULT_EXPR)))
6019 {
6020 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6021 {
6022 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6023 *strict_overflow_p = true;
6024 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6025 fold_convert (ctype,
6026 const_binop (TRUNC_DIV_EXPR,
6027 op1, c)));
6028 }
6029 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6030 {
6031 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6032 *strict_overflow_p = true;
6033 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6034 fold_convert (ctype,
6035 const_binop (TRUNC_DIV_EXPR,
6036 c, op1)));
6037 }
6038 }
6039 break;
6040
6041 default:
6042 break;
6043 }
6044
6045 return 0;
6046 }
6047 \f
6048 /* Return a node which has the indicated constant VALUE (either 0 or
6049 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6050 and is of the indicated TYPE. */
6051
6052 tree
6053 constant_boolean_node (bool value, tree type)
6054 {
6055 if (type == integer_type_node)
6056 return value ? integer_one_node : integer_zero_node;
6057 else if (type == boolean_type_node)
6058 return value ? boolean_true_node : boolean_false_node;
6059 else if (TREE_CODE (type) == VECTOR_TYPE)
6060 return build_vector_from_val (type,
6061 build_int_cst (TREE_TYPE (type),
6062 value ? -1 : 0));
6063 else
6064 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6065 }
6066
6067
6068 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6069 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6070 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6071 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6072 COND is the first argument to CODE; otherwise (as in the example
6073 given here), it is the second argument. TYPE is the type of the
6074 original expression. Return NULL_TREE if no simplification is
6075 possible. */
6076
6077 static tree
6078 fold_binary_op_with_conditional_arg (location_t loc,
6079 enum tree_code code,
6080 tree type, tree op0, tree op1,
6081 tree cond, tree arg, int cond_first_p)
6082 {
6083 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6084 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6085 tree test, true_value, false_value;
6086 tree lhs = NULL_TREE;
6087 tree rhs = NULL_TREE;
6088 enum tree_code cond_code = COND_EXPR;
6089
6090 if (TREE_CODE (cond) == COND_EXPR
6091 || TREE_CODE (cond) == VEC_COND_EXPR)
6092 {
6093 test = TREE_OPERAND (cond, 0);
6094 true_value = TREE_OPERAND (cond, 1);
6095 false_value = TREE_OPERAND (cond, 2);
6096 /* If this arm has void type (e.g. it throws), it does not make
6097 sense to try to perform a logical or arithmetic operation
6098 involving it. */
6099 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6100 lhs = true_value;
6101 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6102 rhs = false_value;
6103 }
6104 else
6105 {
6106 tree testtype = TREE_TYPE (cond);
6107 test = cond;
6108 true_value = constant_boolean_node (true, testtype);
6109 false_value = constant_boolean_node (false, testtype);
6110 }
6111
6112 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6113 cond_code = VEC_COND_EXPR;
6114
6115 /* This transformation is only worthwhile if we don't have to wrap ARG
6116 in a SAVE_EXPR and the operation can be simplified without recursing
6117 on at least one of the branches once it's pushed inside the COND_EXPR. */
6118 if (!TREE_CONSTANT (arg)
6119 && (TREE_SIDE_EFFECTS (arg)
6120 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6121 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6122 return NULL_TREE;
6123
6124 arg = fold_convert_loc (loc, arg_type, arg);
6125 if (lhs == 0)
6126 {
6127 true_value = fold_convert_loc (loc, cond_type, true_value);
6128 if (cond_first_p)
6129 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6130 else
6131 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6132 }
6133 if (rhs == 0)
6134 {
6135 false_value = fold_convert_loc (loc, cond_type, false_value);
6136 if (cond_first_p)
6137 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6138 else
6139 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6140 }
6141
6142 /* Check that we have simplified at least one of the branches. */
6143 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6144 return NULL_TREE;
6145
6146 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6147 }
6148
6149 \f
6150 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6151
6152 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6153 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6154 ADDEND is the same as X.
6155
6156 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6157 and finite. The problematic cases are when X is zero, and its mode
6158 has signed zeros. In the case of rounding towards -infinity,
6159 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6160 modes, X + 0 is not the same as X because -0 + 0 is 0. */
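/* Concretely, with X == -0.0 under round-to-nearest, X + 0.0 yields
   +0.0, which differs from X, while X - 0.0 yields -0.0 == X; under
   rounding toward -infinity, +0.0 - 0.0 instead yields -0.0, which is
   why the negate case below must also check
   HONOR_SIGN_DEPENDENT_ROUNDING.  */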
6161
6162 bool
6163 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6164 {
6165 if (!real_zerop (addend))
6166 return false;
6167
6168 /* Don't allow the fold with -fsignaling-nans. */
6169 if (HONOR_SNANS (TYPE_MODE (type)))
6170 return false;
6171
6172 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6173 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6174 return true;
6175
6176 /* In a vector or complex, we would need to check the sign of all zeros. */
6177 if (TREE_CODE (addend) != REAL_CST)
6178 return false;
6179
6180 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6181 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6182 negate = !negate;
6183
6184 /* The mode has signed zeros, and we have to honor their sign.
6185 In this situation, there is only one case we can return true for.
6186 X - 0 is the same as X unless rounding towards -infinity is
6187 supported. */
6188 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6189 }
6190
6191 /* Subroutine of fold() that checks comparisons of built-in math
6192 functions against real constants.
6193
6194 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6195 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6196 is the type of the result and ARG0 and ARG1 are the operands of the
6197 comparison. ARG1 must be a TREE_REAL_CST.
6198
6199 The function returns the constant folded tree if a simplification
6200 can be made, and NULL_TREE otherwise. */
6201
6202 static tree
6203 fold_mathfn_compare (location_t loc,
6204 enum built_in_function fcode, enum tree_code code,
6205 tree type, tree arg0, tree arg1)
6206 {
6207 REAL_VALUE_TYPE c;
6208
6209 if (BUILTIN_SQRT_P (fcode))
6210 {
6211 tree arg = CALL_EXPR_ARG (arg0, 0);
6212 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6213
6214 c = TREE_REAL_CST (arg1);
6215 if (REAL_VALUE_NEGATIVE (c))
6216 {
6217 /* sqrt(x) < y is always false, if y is negative. */
6218 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6219 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6220
6221 /* sqrt(x) > y is always true, if y is negative and we
6222 don't care about NaNs, i.e. negative values of x. */
6223 if (code == NE_EXPR || !HONOR_NANS (mode))
6224 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6225
6226 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6227 return fold_build2_loc (loc, GE_EXPR, type, arg,
6228 build_real (TREE_TYPE (arg), dconst0));
6229 }
6230 else if (code == GT_EXPR || code == GE_EXPR)
6231 {
6232 REAL_VALUE_TYPE c2;
6233
6234 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6235 real_convert (&c2, mode, &c2);
6236
6237 if (REAL_VALUE_ISINF (c2))
6238 {
6239 /* sqrt(x) > y is x == +Inf, when y is very large. */
6240 if (HONOR_INFINITIES (mode))
6241 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6242 build_real (TREE_TYPE (arg), c2));
6243
6244 /* sqrt(x) > y is always false, when y is very large
6245 and we don't care about infinities. */
6246 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6247 }
6248
6249 /* sqrt(x) > c is the same as x > c*c. */
6250 return fold_build2_loc (loc, code, type, arg,
6251 build_real (TREE_TYPE (arg), c2));
6252 }
6253 else if (code == LT_EXPR || code == LE_EXPR)
6254 {
6255 REAL_VALUE_TYPE c2;
6256
6257 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6258 real_convert (&c2, mode, &c2);
6259
6260 if (REAL_VALUE_ISINF (c2))
6261 {
6262 /* sqrt(x) < y is always true, when y is a very large
6263 value and we don't care about NaNs or Infinities. */
6264 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6265 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6266
6267 /* sqrt(x) < y is x != +Inf when y is very large and we
6268 don't care about NaNs. */
6269 if (! HONOR_NANS (mode))
6270 return fold_build2_loc (loc, NE_EXPR, type, arg,
6271 build_real (TREE_TYPE (arg), c2));
6272
6273 /* sqrt(x) < y is x >= 0 when y is very large and we
6274 don't care about Infinities. */
6275 if (! HONOR_INFINITIES (mode))
6276 return fold_build2_loc (loc, GE_EXPR, type, arg,
6277 build_real (TREE_TYPE (arg), dconst0));
6278
6279 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6280 arg = save_expr (arg);
6281 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6282 fold_build2_loc (loc, GE_EXPR, type, arg,
6283 build_real (TREE_TYPE (arg),
6284 dconst0)),
6285 fold_build2_loc (loc, NE_EXPR, type, arg,
6286 build_real (TREE_TYPE (arg),
6287 c2)));
6288 }
6289
6290 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6291 if (! HONOR_NANS (mode))
6292 return fold_build2_loc (loc, code, type, arg,
6293 build_real (TREE_TYPE (arg), c2));
6294
6295 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6296 arg = save_expr (arg);
6297 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6298 fold_build2_loc (loc, GE_EXPR, type, arg,
6299 build_real (TREE_TYPE (arg),
6300 dconst0)),
6301 fold_build2_loc (loc, code, type, arg,
6302 build_real (TREE_TYPE (arg),
6303 c2)));
6304 }
6305 }
6306
6307 return NULL_TREE;
6308 }
6309
6310 /* Subroutine of fold() that optimizes comparisons against Infinities,
6311 either +Inf or -Inf.
6312
6313 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6314 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6315 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6316
6317 The function returns the constant folded tree if a simplification
6318 can be made, and NULL_TREE otherwise. */
6319
6320 static tree
6321 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6322 tree arg0, tree arg1)
6323 {
6324 enum machine_mode mode;
6325 REAL_VALUE_TYPE max;
6326 tree temp;
6327 bool neg;
6328
6329 mode = TYPE_MODE (TREE_TYPE (arg0));
6330
6331 /* For negative infinity swap the sense of the comparison. */
6332 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6333 if (neg)
6334 code = swap_tree_comparison (code);
6335
6336 switch (code)
6337 {
6338 case GT_EXPR:
6339 /* x > +Inf is always false, if we ignore sNaNs. */
6340 if (HONOR_SNANS (mode))
6341 return NULL_TREE;
6342 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6343
6344 case LE_EXPR:
6345 /* x <= +Inf is always true, if we don't care about NaNs. */
6346 if (! HONOR_NANS (mode))
6347 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6348
6349 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6350 arg0 = save_expr (arg0);
6351 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6352
6353 case EQ_EXPR:
6354 case GE_EXPR:
6355 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6356 real_maxval (&max, neg, mode);
6357 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6358 arg0, build_real (TREE_TYPE (arg0), max));
6359
6360 case LT_EXPR:
6361 /* x < +Inf is always equal to x <= DBL_MAX. */
6362 real_maxval (&max, neg, mode);
6363 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6364 arg0, build_real (TREE_TYPE (arg0), max));
6365
6366 case NE_EXPR:
6367 /* x != +Inf is always equal to !(x > DBL_MAX). */
6368 real_maxval (&max, neg, mode);
6369 if (! HONOR_NANS (mode))
6370 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6371 arg0, build_real (TREE_TYPE (arg0), max));
6372
6373 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6374 arg0, build_real (TREE_TYPE (arg0), max));
6375 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6376
6377 default:
6378 break;
6379 }
6380
6381 return NULL_TREE;
6382 }
6383
6384 /* Subroutine of fold() that optimizes comparisons of a division by
6385 a nonzero integer constant against an integer constant, i.e.
6386 X/C1 op C2.
6387
6388 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6389 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6390 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6391
6392 The function returns the constant folded tree if a simplification
6393 can be made, and NULL_TREE otherwise. */
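/* For example, with unsigned X the test X / 4 == 3 holds exactly for
   X in [12, 15]: lo = 4 * 3 = 12 and hi = lo + (4 - 1) = 15, so the
   comparison becomes the range check 12 <= X && X <= 15 (a sketch of
   the computation below; constants illustrative).  */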
6394
6395 static tree
6396 fold_div_compare (location_t loc,
6397 enum tree_code code, tree type, tree arg0, tree arg1)
6398 {
6399 tree prod, tmp, hi, lo;
6400 tree arg00 = TREE_OPERAND (arg0, 0);
6401 tree arg01 = TREE_OPERAND (arg0, 1);
6402 wide_int val;
6403 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6404 bool neg_overflow = false;
6405 bool overflow;
6406
6407 /* We have to do this the hard way to detect unsigned overflow.
6408 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6409 val = wi::mul (arg01, arg1, sign, &overflow);
6410 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6411 neg_overflow = false;
6412
6413 if (sign == UNSIGNED)
6414 {
6415 tmp = int_const_binop (MINUS_EXPR, arg01,
6416 build_int_cst (TREE_TYPE (arg01), 1));
6417 lo = prod;
6418
6419 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6420 val = wi::add (prod, tmp, sign, &overflow);
6421 hi = force_fit_type (TREE_TYPE (arg00), val,
6422 -1, overflow | TREE_OVERFLOW (prod));
6423 }
6424 else if (tree_int_cst_sgn (arg01) >= 0)
6425 {
6426 tmp = int_const_binop (MINUS_EXPR, arg01,
6427 build_int_cst (TREE_TYPE (arg01), 1));
6428 switch (tree_int_cst_sgn (arg1))
6429 {
6430 case -1:
6431 neg_overflow = true;
6432 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6433 hi = prod;
6434 break;
6435
6436 case 0:
6437 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6438 hi = tmp;
6439 break;
6440
6441 case 1:
6442 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6443 lo = prod;
6444 break;
6445
6446 default:
6447 gcc_unreachable ();
6448 }
6449 }
6450 else
6451 {
6452 /* A negative divisor reverses the relational operators. */
6453 code = swap_tree_comparison (code);
6454
6455 tmp = int_const_binop (PLUS_EXPR, arg01,
6456 build_int_cst (TREE_TYPE (arg01), 1));
6457 switch (tree_int_cst_sgn (arg1))
6458 {
6459 case -1:
6460 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6461 lo = prod;
6462 break;
6463
6464 case 0:
6465 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6466 lo = tmp;
6467 break;
6468
6469 case 1:
6470 neg_overflow = true;
6471 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6472 hi = prod;
6473 break;
6474
6475 default:
6476 gcc_unreachable ();
6477 }
6478 }
6479
6480 switch (code)
6481 {
6482 case EQ_EXPR:
6483 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6484 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6485 if (TREE_OVERFLOW (hi))
6486 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6487 if (TREE_OVERFLOW (lo))
6488 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6489 return build_range_check (loc, type, arg00, 1, lo, hi);
6490
6491 case NE_EXPR:
6492 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6493 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6494 if (TREE_OVERFLOW (hi))
6495 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6496 if (TREE_OVERFLOW (lo))
6497 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6498 return build_range_check (loc, type, arg00, 0, lo, hi);
6499
6500 case LT_EXPR:
6501 if (TREE_OVERFLOW (lo))
6502 {
6503 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6504 return omit_one_operand_loc (loc, type, tmp, arg00);
6505 }
6506 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6507
6508 case LE_EXPR:
6509 if (TREE_OVERFLOW (hi))
6510 {
6511 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6512 return omit_one_operand_loc (loc, type, tmp, arg00);
6513 }
6514 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6515
6516 case GT_EXPR:
6517 if (TREE_OVERFLOW (hi))
6518 {
6519 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6520 return omit_one_operand_loc (loc, type, tmp, arg00);
6521 }
6522 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6523
6524 case GE_EXPR:
6525 if (TREE_OVERFLOW (lo))
6526 {
6527 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6528 return omit_one_operand_loc (loc, type, tmp, arg00);
6529 }
6530 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6531
6532 default:
6533 break;
6534 }
6535
6536 return NULL_TREE;
6537 }
6538
6539
6540 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6541 equality/inequality test, then return a simplified form of the test
6542 using a sign testing. Otherwise return NULL. TYPE is the desired
6543 result type. */
6544
6545 static tree
6546 fold_single_bit_test_into_sign_test (location_t loc,
6547 enum tree_code code, tree arg0, tree arg1,
6548 tree result_type)
6549 {
6550 /* If this is testing a single bit, we can optimize the test. */
6551 if ((code == NE_EXPR || code == EQ_EXPR)
6552 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6553 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6554 {
6555 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6556 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6557 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6558
6559 if (arg00 != NULL_TREE
6560 /* This is only a win if casting to a signed type is cheap,
6561 i.e. when arg00's type is not a partial mode. */
6562 && TYPE_PRECISION (TREE_TYPE (arg00))
6563 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6564 {
6565 tree stype = signed_type_for (TREE_TYPE (arg00));
6566 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6567 result_type,
6568 fold_convert_loc (loc, stype, arg00),
6569 build_int_cst (stype, 0));
6570 }
6571 }
6572
6573 return NULL_TREE;
6574 }
6575
6576 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6577 equality/inequality test, then return a simplified form of
6578 the test using shifts and logical operations. Otherwise return
6579 NULL. TYPE is the desired result type. */
6580
6581 tree
6582 fold_single_bit_test (location_t loc, enum tree_code code,
6583 tree arg0, tree arg1, tree result_type)
6584 {
6585 /* If this is testing a single bit, we can optimize the test. */
6586 if ((code == NE_EXPR || code == EQ_EXPR)
6587 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6588 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6589 {
6590 tree inner = TREE_OPERAND (arg0, 0);
6591 tree type = TREE_TYPE (arg0);
6592 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6593 enum machine_mode operand_mode = TYPE_MODE (type);
6594 int ops_unsigned;
6595 tree signed_type, unsigned_type, intermediate_type;
6596 tree tem, one;
6597
6598 /* First, see if we can fold the single bit test into a sign-bit
6599 test. */
6600 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6601 result_type);
6602 if (tem)
6603 return tem;
6604
6605 /* Otherwise we have (A & C) != 0 where C is a single bit,
6606 convert that into ((A >> C2) & 1), where C2 = log2(C).
6607 Similarly for (A & C) == 0. */
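	 /* E.g. (A & 8) != 0 becomes (A >> 3) & 1, while (A & 8) == 0
	    becomes ((A >> 3) ^ 1) & 1, using the XOR with one below to
	    invert the extracted bit.  */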
6608
6609 /* If INNER is a right shift of a constant and it plus BITNUM does
6610 not overflow, adjust BITNUM and INNER. */
6611 if (TREE_CODE (inner) == RSHIFT_EXPR
6612 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6613 && wi::ltu_p (wi::add (TREE_OPERAND (inner, 1), bitnum),
6614 TYPE_PRECISION (type)))
6615 {
6616 bitnum += tree_to_hwi (TREE_OPERAND (inner, 1));
6617 inner = TREE_OPERAND (inner, 0);
6618 }
6619
6620 /* If we are going to be able to omit the AND below, we must do our
6621 operations as unsigned. If we must use the AND, we have a choice.
6622 Normally unsigned is faster, but for some machines signed is. */
6623 #ifdef LOAD_EXTEND_OP
6624 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6625 && !flag_syntax_only) ? 0 : 1;
6626 #else
6627 ops_unsigned = 1;
6628 #endif
6629
6630 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6631 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6632 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6633 inner = fold_convert_loc (loc, intermediate_type, inner);
6634
6635 if (bitnum != 0)
6636 inner = build2 (RSHIFT_EXPR, intermediate_type,
6637 inner, size_int (bitnum));
6638
6639 one = build_int_cst (intermediate_type, 1);
6640
6641 if (code == EQ_EXPR)
6642 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6643
6644 /* Put the AND last so it can combine with more things. */
6645 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6646
6647 /* Make sure to return the proper type. */
6648 inner = fold_convert_loc (loc, result_type, inner);
6649
6650 return inner;
6651 }
6652 return NULL_TREE;
6653 }
6654
6655 /* Check whether we are allowed to reorder operands arg0 and arg1,
6656 such that the evaluation of arg1 occurs before arg0. */
6657
6658 static bool
6659 reorder_operands_p (const_tree arg0, const_tree arg1)
6660 {
6661 if (! flag_evaluation_order)
6662 return true;
6663 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6664 return true;
6665 return ! TREE_SIDE_EFFECTS (arg0)
6666 && ! TREE_SIDE_EFFECTS (arg1);
6667 }
6668
6669 /* Test whether it is preferable to swap two operands, ARG0 and
6670 ARG1, for example because ARG0 is an integer constant and ARG1
6671 isn't. If REORDER is true, only recommend swapping if we can
6672 evaluate the operands in reverse order. */
6673
6674 bool
6675 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6676 {
6677 STRIP_SIGN_NOPS (arg0);
6678 STRIP_SIGN_NOPS (arg1);
6679
6680 if (TREE_CODE (arg1) == INTEGER_CST)
6681 return 0;
6682 if (TREE_CODE (arg0) == INTEGER_CST)
6683 return 1;
6684
6685 if (TREE_CODE (arg1) == REAL_CST)
6686 return 0;
6687 if (TREE_CODE (arg0) == REAL_CST)
6688 return 1;
6689
6690 if (TREE_CODE (arg1) == FIXED_CST)
6691 return 0;
6692 if (TREE_CODE (arg0) == FIXED_CST)
6693 return 1;
6694
6695 if (TREE_CODE (arg1) == COMPLEX_CST)
6696 return 0;
6697 if (TREE_CODE (arg0) == COMPLEX_CST)
6698 return 1;
6699
6700 if (TREE_CONSTANT (arg1))
6701 return 0;
6702 if (TREE_CONSTANT (arg0))
6703 return 1;
6704
6705 if (optimize_function_for_size_p (cfun))
6706 return 0;
6707
6708 if (reorder && flag_evaluation_order
6709 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6710 return 0;
6711
6712 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6713 for commutative and comparison operators. Ensuring a canonical
6714 form allows the optimizers to find additional redundancies without
6715 having to explicitly check for both orderings. */
6716 if (TREE_CODE (arg0) == SSA_NAME
6717 && TREE_CODE (arg1) == SSA_NAME
6718 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6719 return 1;
6720
6721 /* Put SSA_NAMEs last. */
6722 if (TREE_CODE (arg1) == SSA_NAME)
6723 return 0;
6724 if (TREE_CODE (arg0) == SSA_NAME)
6725 return 1;
6726
6727 /* Put variables last. */
6728 if (DECL_P (arg1))
6729 return 0;
6730 if (DECL_P (arg0))
6731 return 1;
6732
6733 return 0;
6734 }
6735
6736 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6737 ARG0 is extended to a wider type. */
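/* E.g. for unsigned char C, the test (int) C == 300 can never hold,
   since 300 lies outside [0, 255]; such comparisons fold to a
   constant below (types and constants illustrative).  */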
6738
6739 static tree
6740 fold_widened_comparison (location_t loc, enum tree_code code,
6741 tree type, tree arg0, tree arg1)
6742 {
6743 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6744 tree arg1_unw;
6745 tree shorter_type, outer_type;
6746 tree min, max;
6747 bool above, below;
6748
6749 if (arg0_unw == arg0)
6750 return NULL_TREE;
6751 shorter_type = TREE_TYPE (arg0_unw);
6752
6753 #ifdef HAVE_canonicalize_funcptr_for_compare
6754 /* Disable this optimization if we're casting a function pointer
6755 type on targets that require function pointer canonicalization. */
6756 if (HAVE_canonicalize_funcptr_for_compare
6757 && TREE_CODE (shorter_type) == POINTER_TYPE
6758 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6759 return NULL_TREE;
6760 #endif
6761
6762 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6763 return NULL_TREE;
6764
6765 arg1_unw = get_unwidened (arg1, NULL_TREE);
6766
6767 /* If possible, express the comparison in the shorter mode. */
6768 if ((code == EQ_EXPR || code == NE_EXPR
6769 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6770 && (TREE_TYPE (arg1_unw) == shorter_type
6771 || ((TYPE_PRECISION (shorter_type)
6772 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6773 && (TYPE_UNSIGNED (shorter_type)
6774 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6775 || (TREE_CODE (arg1_unw) == INTEGER_CST
6776 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6777 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6778 && int_fits_type_p (arg1_unw, shorter_type))))
6779 return fold_build2_loc (loc, code, type, arg0_unw,
6780 fold_convert_loc (loc, shorter_type, arg1_unw));
6781
6782 if (TREE_CODE (arg1_unw) != INTEGER_CST
6783 || TREE_CODE (shorter_type) != INTEGER_TYPE
6784 || !int_fits_type_p (arg1_unw, shorter_type))
6785 return NULL_TREE;
6786
6787 /* If we are comparing with an integer that does not fit into the range
6788 of the shorter type, the result is known. */
6789 outer_type = TREE_TYPE (arg1_unw);
6790 min = lower_bound_in_type (outer_type, shorter_type);
6791 max = upper_bound_in_type (outer_type, shorter_type);
6792
6793 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6794 max, arg1_unw));
6795 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6796 arg1_unw, min));
6797
6798 switch (code)
6799 {
6800 case EQ_EXPR:
6801 if (above || below)
6802 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6803 break;
6804
6805 case NE_EXPR:
6806 if (above || below)
6807 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6808 break;
6809
6810 case LT_EXPR:
6811 case LE_EXPR:
6812 if (above)
6813 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6814 else if (below)
6815 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6816 break;
6817 case GT_EXPR:
6818 case GE_EXPR:
6819 if (above)
6820 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6821 else if (below)
6822 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6823 break;
6824 default:
6825 break;
6826 }
6827
6828 return NULL_TREE;
6829 }
6830
6831 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6832 ARG0 just the signedness is changed. */
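/* E.g. with unsigned int U, the test (int) U == -1 becomes
   U == 0xffffffffU: the constant is refit into the inner type and the
   conversion is dropped (an illustrative sketch).  */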
6833
6834 static tree
6835 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6836 tree arg0, tree arg1)
6837 {
6838 tree arg0_inner;
6839 tree inner_type, outer_type;
6840
6841 if (!CONVERT_EXPR_P (arg0))
6842 return NULL_TREE;
6843
6844 outer_type = TREE_TYPE (arg0);
6845 arg0_inner = TREE_OPERAND (arg0, 0);
6846 inner_type = TREE_TYPE (arg0_inner);
6847
6848 #ifdef HAVE_canonicalize_funcptr_for_compare
6849 /* Disable this optimization if we're casting a function pointer
6850 type on targets that require function pointer canonicalization. */
6851 if (HAVE_canonicalize_funcptr_for_compare
6852 && TREE_CODE (inner_type) == POINTER_TYPE
6853 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6854 return NULL_TREE;
6855 #endif
6856
6857 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6858 return NULL_TREE;
6859
6860 if (TREE_CODE (arg1) != INTEGER_CST
6861 && !(CONVERT_EXPR_P (arg1)
6862 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6863 return NULL_TREE;
6864
6865 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6866 && code != NE_EXPR
6867 && code != EQ_EXPR)
6868 return NULL_TREE;
6869
6870 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6871 return NULL_TREE;
6872
6873 if (TREE_CODE (arg1) == INTEGER_CST)
6874 arg1 = force_fit_type (inner_type, arg1, 0, TREE_OVERFLOW (arg1));
6875 else
6876 arg1 = fold_convert_loc (loc, inner_type, arg1);
6877
6878 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6879 }
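
/* A sketch of the transformation above (hypothetical example): given

     unsigned int u;
     ... (int) u == 5 ...

   the inner and outer types have equal precision and the code is
   EQ_EXPR, so the cast is stripped and the constant is refitted into
   the inner type, yielding u == 5u.  */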
6880
6881 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6882 the step of the array. Reconstructs s and delta in the case of s *
6883 delta being an integer constant (and thus already folded). ADDR is
6884 the address. OP1 is the multiplicative expression. If the
6885 function succeeds, the new address expression is returned.
6886 Otherwise NULL_TREE is returned. LOC is the location of the
6887 resulting expression. */
6888
6889 static tree
6890 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6891 {
6892 tree s, delta, step;
6893 tree ref = TREE_OPERAND (addr, 0), pref;
6894 tree ret, pos;
6895 tree itype;
6896 bool mdim = false;
6897
6898 /* Strip the nops that might be added when converting op1 to sizetype. */
6899 STRIP_NOPS (op1);
6900
6901 /* Canonicalize op1 into a possibly non-constant delta
6902 and an INTEGER_CST s. */
6903 if (TREE_CODE (op1) == MULT_EXPR)
6904 {
6905 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6906
6907 STRIP_NOPS (arg0);
6908 STRIP_NOPS (arg1);
6909
6910 if (TREE_CODE (arg0) == INTEGER_CST)
6911 {
6912 s = arg0;
6913 delta = arg1;
6914 }
6915 else if (TREE_CODE (arg1) == INTEGER_CST)
6916 {
6917 s = arg1;
6918 delta = arg0;
6919 }
6920 else
6921 return NULL_TREE;
6922 }
6923 else if (TREE_CODE (op1) == INTEGER_CST)
6924 {
6925 delta = op1;
6926 s = NULL_TREE;
6927 }
6928 else
6929 {
6930 /* Treat op1 as delta * 1. */
6931 delta = op1;
6932 s = integer_one_node;
6933 }
6934
6935 /* Handle &x.array the same as we would handle &x.array[0]. */
6936 if (TREE_CODE (ref) == COMPONENT_REF
6937 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6938 {
6939 tree domain;
6940
6941 /* Remember if this was a multi-dimensional array. */
6942 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6943 mdim = true;
6944
6945 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6946 if (! domain)
6947 goto cont;
6948 itype = TREE_TYPE (domain);
6949
6950 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6951 if (TREE_CODE (step) != INTEGER_CST)
6952 goto cont;
6953
6954 if (s)
6955 {
6956 if (! tree_int_cst_equal (step, s))
6957 goto cont;
6958 }
6959 else
6960 {
6961 /* Check whether delta is a multiple of step. */
6962 tree tmp = div_if_zero_remainder (op1, step);
6963 if (! tmp)
6964 goto cont;
6965 delta = tmp;
6966 }
6967
6968 /* Only fold here if we can verify we do not overflow one
6969 dimension of a multi-dimensional array. */
6970 if (mdim)
6971 {
6972 tree tmp;
6973
6974 if (!TYPE_MIN_VALUE (domain)
6975 || !TYPE_MAX_VALUE (domain)
6976 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6977 goto cont;
6978
6979 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6980 fold_convert_loc (loc, itype,
6981 TYPE_MIN_VALUE (domain)),
6982 fold_convert_loc (loc, itype, delta));
6983 if (TREE_CODE (tmp) != INTEGER_CST
6984 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6985 goto cont;
6986 }
6987
6988 /* We found a suitable component reference. */
6989
6990 pref = TREE_OPERAND (addr, 0);
6991 ret = copy_node (pref);
6992 SET_EXPR_LOCATION (ret, loc);
6993
6994 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6995 fold_build2_loc
6996 (loc, PLUS_EXPR, itype,
6997 fold_convert_loc (loc, itype,
6998 TYPE_MIN_VALUE
6999 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7000 fold_convert_loc (loc, itype, delta)),
7001 NULL_TREE, NULL_TREE);
7002 return build_fold_addr_expr_loc (loc, ret);
7003 }
7004
7005 cont:
7006
7007 for (;; ref = TREE_OPERAND (ref, 0))
7008 {
7009 if (TREE_CODE (ref) == ARRAY_REF)
7010 {
7011 tree domain;
7012
7013 /* Remember if this was a multi-dimensional array. */
7014 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7015 mdim = true;
7016
7017 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7018 if (! domain)
7019 continue;
7020 itype = TREE_TYPE (domain);
7021
7022 step = array_ref_element_size (ref);
7023 if (TREE_CODE (step) != INTEGER_CST)
7024 continue;
7025
7026 if (s)
7027 {
7028 if (! tree_int_cst_equal (step, s))
7029 continue;
7030 }
7031 else
7032 {
7033 /* Check whether delta is a multiple of step. */
7034 tree tmp = div_if_zero_remainder (op1, step);
7035 if (! tmp)
7036 continue;
7037 delta = tmp;
7038 }
7039
7040 /* Only fold here if we can verify we do not overflow one
7041 dimension of a multi-dimensional array. */
7042 if (mdim)
7043 {
7044 tree tmp;
7045
7046 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7047 || !TYPE_MAX_VALUE (domain)
7048 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7049 continue;
7050
7051 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7052 fold_convert_loc (loc, itype,
7053 TREE_OPERAND (ref, 1)),
7054 fold_convert_loc (loc, itype, delta));
7055 if (!tmp
7056 || TREE_CODE (tmp) != INTEGER_CST
7057 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7058 continue;
7059 }
7060
7061 break;
7062 }
7063 else
7064 mdim = false;
7065
7066 if (!handled_component_p (ref))
7067 return NULL_TREE;
7068 }
7069
7070 /* We found a suitable array reference. So copy everything up to it,
7071 and replace the index. */
7072
7073 pref = TREE_OPERAND (addr, 0);
7074 ret = copy_node (pref);
7075 SET_EXPR_LOCATION (ret, loc);
7076 pos = ret;
7077
7078 while (pref != ref)
7079 {
7080 pref = TREE_OPERAND (pref, 0);
7081 TREE_OPERAND (pos, 0) = copy_node (pref);
7082 pos = TREE_OPERAND (pos, 0);
7083 }
7084
7085 TREE_OPERAND (pos, 1)
7086 = fold_build2_loc (loc, PLUS_EXPR, itype,
7087 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7088 fold_convert_loc (loc, itype, delta));
7089 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7090 }
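
/* Illustrative uses of the rewrite above (hypothetical types and
   values): for int a[16], the address &a[i] p+ 4 * d has S equal to
   the 4-byte element step, so it becomes &a[i + d]; for a plain
   constant such as &a[i] p+ 8, div_if_zero_remainder recovers
   DELTA = 8 / 4 = 2 and the result is &a[i + 2].  */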
7091
7092
7093 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7094 means A >= Y && A != MAX, but in this case we know that
7095 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7096
7097 static tree
7098 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7099 {
7100 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7101
7102 if (TREE_CODE (bound) == LT_EXPR)
7103 a = TREE_OPERAND (bound, 0);
7104 else if (TREE_CODE (bound) == GT_EXPR)
7105 a = TREE_OPERAND (bound, 1);
7106 else
7107 return NULL_TREE;
7108
7109 typea = TREE_TYPE (a);
7110 if (!INTEGRAL_TYPE_P (typea)
7111 && !POINTER_TYPE_P (typea))
7112 return NULL_TREE;
7113
7114 if (TREE_CODE (ineq) == LT_EXPR)
7115 {
7116 a1 = TREE_OPERAND (ineq, 1);
7117 y = TREE_OPERAND (ineq, 0);
7118 }
7119 else if (TREE_CODE (ineq) == GT_EXPR)
7120 {
7121 a1 = TREE_OPERAND (ineq, 0);
7122 y = TREE_OPERAND (ineq, 1);
7123 }
7124 else
7125 return NULL_TREE;
7126
7127 if (TREE_TYPE (a1) != typea)
7128 return NULL_TREE;
7129
7130 if (POINTER_TYPE_P (typea))
7131 {
7132 /* Convert the pointers into integers before taking the difference. */
7133 tree ta = fold_convert_loc (loc, ssizetype, a);
7134 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7135 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7136 }
7137 else
7138 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7139
7140 if (!diff || !integer_onep (diff))
7141 return NULL_TREE;
7142
7143 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7144 }
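
/* Worked instance of the fold above (hypothetical operands): BOUND
   is a < x, which guarantees a != MAX, so in INEQ the expression
   a + 1 cannot wrap; a + 1 > y is then equivalent to a >= y, and
   a < x && a + 1 > y simplifies to a < x && a >= y.  */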
7145
7146 /* Fold a sum or difference of at least one multiplication.
7147 Returns the folded tree or NULL if no simplification could be made. */
7148
7149 static tree
7150 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7151 tree arg0, tree arg1)
7152 {
7153 tree arg00, arg01, arg10, arg11;
7154 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7155
7156 /* (A * C) +- (B * C) -> (A+-B) * C.
7157 (A * C) +- A -> A * (C+-1).
7158 We are most concerned about the case where C is a constant,
7159 but other combinations show up during loop reduction. Since
7160 it is not difficult, try all four possibilities. */
7161
7162 if (TREE_CODE (arg0) == MULT_EXPR)
7163 {
7164 arg00 = TREE_OPERAND (arg0, 0);
7165 arg01 = TREE_OPERAND (arg0, 1);
7166 }
7167 else if (TREE_CODE (arg0) == INTEGER_CST)
7168 {
7169 arg00 = build_one_cst (type);
7170 arg01 = arg0;
7171 }
7172 else
7173 {
7174 /* We cannot generate constant 1 for fract. */
7175 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7176 return NULL_TREE;
7177 arg00 = arg0;
7178 arg01 = build_one_cst (type);
7179 }
7180 if (TREE_CODE (arg1) == MULT_EXPR)
7181 {
7182 arg10 = TREE_OPERAND (arg1, 0);
7183 arg11 = TREE_OPERAND (arg1, 1);
7184 }
7185 else if (TREE_CODE (arg1) == INTEGER_CST)
7186 {
7187 arg10 = build_one_cst (type);
7188 /* Since we canonicalize A - 2 to A + -2, get rid of that sign for
7189 the purpose of this transformation. */
7190 if (TYPE_SIGN (TREE_TYPE (arg1)) == SIGNED
7191 && wi::neg_p (arg1)
7192 && negate_expr_p (arg1)
7193 && code == PLUS_EXPR)
7194 {
7195 arg11 = negate_expr (arg1);
7196 code = MINUS_EXPR;
7197 }
7198 else
7199 arg11 = arg1;
7200 }
7201 else
7202 {
7203 /* We cannot generate constant 1 for fract. */
7204 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7205 return NULL_TREE;
7206 arg10 = arg1;
7207 arg11 = build_one_cst (type);
7208 }
7209 same = NULL_TREE;
7210
7211 if (operand_equal_p (arg01, arg11, 0))
7212 same = arg01, alt0 = arg00, alt1 = arg10;
7213 else if (operand_equal_p (arg00, arg10, 0))
7214 same = arg00, alt0 = arg01, alt1 = arg11;
7215 else if (operand_equal_p (arg00, arg11, 0))
7216 same = arg00, alt0 = arg01, alt1 = arg10;
7217 else if (operand_equal_p (arg01, arg10, 0))
7218 same = arg01, alt0 = arg00, alt1 = arg11;
7219
7220 /* No identical multiplicands; see if we can find a common
7221 power-of-two factor in non-power-of-two multiplies. This
7222 can help in multi-dimensional array access. */
7223 else if (tree_fits_shwi_p (arg01)
7224 && tree_fits_shwi_p (arg11))
7225 {
7226 HOST_WIDE_INT int01, int11, tmp;
7227 bool swap = false;
7228 tree maybe_same;
7229 int01 = tree_to_shwi (arg01);
7230 int11 = tree_to_shwi (arg11);
7231
7232 /* Move min of absolute values to int11. */
7233 if (absu_hwi (int01) < absu_hwi (int11))
7234 {
7235 tmp = int01, int01 = int11, int11 = tmp;
7236 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7237 maybe_same = arg01;
7238 swap = true;
7239 }
7240 else
7241 maybe_same = arg11;
7242
7243 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7244 /* The remainder should not be a constant, otherwise we
7245 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7246 increases the number of multiplications necessary. */
7247 && TREE_CODE (arg10) != INTEGER_CST)
7248 {
7249 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7250 build_int_cst (TREE_TYPE (arg00),
7251 int01 / int11));
7252 alt1 = arg10;
7253 same = maybe_same;
7254 if (swap)
7255 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7256 }
7257 }
7258
7259 if (same)
7260 return fold_build2_loc (loc, MULT_EXPR, type,
7261 fold_build2_loc (loc, code, type,
7262 fold_convert_loc (loc, type, alt0),
7263 fold_convert_loc (loc, type, alt1)),
7264 fold_convert_loc (loc, type, same));
7265
7266 return NULL_TREE;
7267 }
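
/* Two representative folds performed above (hypothetical expressions):

     x * 8 + y * 8   ->  (x + y) * 8      (identical multiplicands)
     i * 12 + j * 4  ->  (i * 3 + j) * 4  (common power-of-two factor)

   The second form is the one that helps multi-dimensional array
   accesses share a single multiplication.  */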
7268
7269 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7270 specified by EXPR into the buffer PTR of length LEN bytes.
7271 Return the number of bytes placed in the buffer, or zero
7272 upon failure. */
7273
7274 static int
7275 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7276 {
7277 tree type = TREE_TYPE (expr);
7278 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7279 int byte, offset, word, words;
7280 unsigned char value;
7281
7282 if (total_bytes > len)
7283 return 0;
7284 words = total_bytes / UNITS_PER_WORD;
7285
7286 for (byte = 0; byte < total_bytes; byte++)
7287 {
7288 int bitpos = byte * BITS_PER_UNIT;
7289 value = wi::extract_uhwi (expr, bitpos, BITS_PER_UNIT);
7290
7291 if (total_bytes > UNITS_PER_WORD)
7292 {
7293 word = byte / UNITS_PER_WORD;
7294 if (WORDS_BIG_ENDIAN)
7295 word = (words - 1) - word;
7296 offset = word * UNITS_PER_WORD;
7297 if (BYTES_BIG_ENDIAN)
7298 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7299 else
7300 offset += byte % UNITS_PER_WORD;
7301 }
7302 else
7303 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7304 ptr[offset] = value;
7305 }
7306 return total_bytes;
7307 }
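
/* By way of example (hypothetical 32-bit constant on a little-endian
   target with 4-byte words): encoding the INTEGER_CST 0x01020304
   extracts one byte per BITS_PER_UNIT step starting at the least
   significant bit, storing 04 03 02 01 at offsets 0..3; a big-endian
   target mirrors the offsets instead.  */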
7308
7309
7310 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7311 specified by EXPR into the buffer PTR of length LEN bytes.
7312 Return the number of bytes placed in the buffer, or zero
7313 upon failure. */
7314
7315 static int
7316 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7317 {
7318 tree type = TREE_TYPE (expr);
7319 enum machine_mode mode = TYPE_MODE (type);
7320 int total_bytes = GET_MODE_SIZE (mode);
7321 FIXED_VALUE_TYPE value;
7322 tree i_value, i_type;
7323
7324 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7325 return 0;
7326
7327 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7328
7329 if (NULL_TREE == i_type
7330 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7331 return 0;
7332
7333 value = TREE_FIXED_CST (expr);
7334 i_value = double_int_to_tree (i_type, value.data);
7335
7336 return native_encode_int (i_value, ptr, len);
7337 }
7338
7339
7340 /* Subroutine of native_encode_expr. Encode the REAL_CST
7341 specified by EXPR into the buffer PTR of length LEN bytes.
7342 Return the number of bytes placed in the buffer, or zero
7343 upon failure. */
7344
7345 static int
7346 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7347 {
7348 tree type = TREE_TYPE (expr);
7349 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7350 int byte, offset, word, words, bitpos;
7351 unsigned char value;
7352
7353 /* There are always 32 bits in each long, no matter the size of
7354 the host's long. We handle floating point representations with
7355 up to 192 bits. */
7356 long tmp[6];
7357
7358 if (total_bytes > len)
7359 return 0;
7360 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7361
7362 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7363
7364 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7365 bitpos += BITS_PER_UNIT)
7366 {
7367 byte = (bitpos / BITS_PER_UNIT) & 3;
7368 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7369
7370 if (UNITS_PER_WORD < 4)
7371 {
7372 word = byte / UNITS_PER_WORD;
7373 if (WORDS_BIG_ENDIAN)
7374 word = (words - 1) - word;
7375 offset = word * UNITS_PER_WORD;
7376 if (BYTES_BIG_ENDIAN)
7377 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7378 else
7379 offset += byte % UNITS_PER_WORD;
7380 }
7381 else
7382 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7383 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7384 }
7385 return total_bytes;
7386 }
7387
7388 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7389 specified by EXPR into the buffer PTR of length LEN bytes.
7390 Return the number of bytes placed in the buffer, or zero
7391 upon failure. */
7392
7393 static int
7394 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7395 {
7396 int rsize, isize;
7397 tree part;
7398
7399 part = TREE_REALPART (expr);
7400 rsize = native_encode_expr (part, ptr, len);
7401 if (rsize == 0)
7402 return 0;
7403 part = TREE_IMAGPART (expr);
7404 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7405 if (isize != rsize)
7406 return 0;
7407 return rsize + isize;
7408 }
7409
7410
7411 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7412 specified by EXPR into the buffer PTR of length LEN bytes.
7413 Return the number of bytes placed in the buffer, or zero
7414 upon failure. */
7415
7416 static int
7417 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7418 {
7419 unsigned i, count;
7420 int size, offset;
7421 tree itype, elem;
7422
7423 offset = 0;
7424 count = VECTOR_CST_NELTS (expr);
7425 itype = TREE_TYPE (TREE_TYPE (expr));
7426 size = GET_MODE_SIZE (TYPE_MODE (itype));
7427 for (i = 0; i < count; i++)
7428 {
7429 elem = VECTOR_CST_ELT (expr, i);
7430 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7431 return 0;
7432 offset += size;
7433 }
7434 return offset;
7435 }
7436
7437
7438 /* Subroutine of native_encode_expr. Encode the STRING_CST
7439 specified by EXPR into the buffer PTR of length LEN bytes.
7440 Return the number of bytes placed in the buffer, or zero
7441 upon failure. */
7442
7443 static int
7444 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7445 {
7446 tree type = TREE_TYPE (expr);
7447 HOST_WIDE_INT total_bytes;
7448
7449 if (TREE_CODE (type) != ARRAY_TYPE
7450 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7451 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7452 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7453 return 0;
7454 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7455 if (total_bytes > len)
7456 return 0;
7457 if (TREE_STRING_LENGTH (expr) < total_bytes)
7458 {
7459 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7460 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7461 total_bytes - TREE_STRING_LENGTH (expr));
7462 }
7463 else
7464 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7465 return total_bytes;
7466 }
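
/* For instance (hypothetical constant): encoding "ab" stored in a
   char[8] copies the TREE_STRING_LENGTH = 3 bytes 'a', 'b', '\0'
   and zero-fills the remaining 5 bytes of the array.  */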
7467
7468
7469 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7470 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7471 buffer PTR of length LEN bytes. Return the number of bytes
7472 placed in the buffer, or zero upon failure. */
7473
7474 int
7475 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7476 {
7477 switch (TREE_CODE (expr))
7478 {
7479 case INTEGER_CST:
7480 return native_encode_int (expr, ptr, len);
7481
7482 case REAL_CST:
7483 return native_encode_real (expr, ptr, len);
7484
7485 case FIXED_CST:
7486 return native_encode_fixed (expr, ptr, len);
7487
7488 case COMPLEX_CST:
7489 return native_encode_complex (expr, ptr, len);
7490
7491 case VECTOR_CST:
7492 return native_encode_vector (expr, ptr, len);
7493
7494 case STRING_CST:
7495 return native_encode_string (expr, ptr, len);
7496
7497 default:
7498 return 0;
7499 }
7500 }
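
/* Typical use (a minimal sketch with a hypothetical caller; compare
   fold_view_convert_expr below): a 64-byte buffer covers every
   supported constant, and a zero return means "not encodable":

     unsigned char buf[64];
     int len = native_encode_expr (cst, buf, sizeof (buf));
     if (len == 0)
       return NULL_TREE;  */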
7501
7502
7503 /* Subroutine of native_interpret_expr. Interpret the contents of
7504 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7505 If the buffer cannot be interpreted, return NULL_TREE. */
7506
7507 static tree
7508 native_interpret_int (tree type, const unsigned char *ptr, int len)
7509 {
7510 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7511 wide_int result;
7512
7513 if (total_bytes > len
7514 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7515 return NULL_TREE;
7516
7517 result = wi::from_buffer (ptr, total_bytes);
7518
7519 return wide_int_to_tree (type, result);
7520 }
7521
7522
7523 /* Subroutine of native_interpret_expr. Interpret the contents of
7524 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7525 If the buffer cannot be interpreted, return NULL_TREE. */
7526
7527 static tree
7528 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7529 {
7530 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7531 double_int result;
7532 FIXED_VALUE_TYPE fixed_value;
7533
7534 if (total_bytes > len
7535 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7536 return NULL_TREE;
7537
7538 result = double_int::from_buffer (ptr, total_bytes);
7539 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7540
7541 return build_fixed (type, fixed_value);
7542 }
7543
7544
7545 /* Subroutine of native_interpret_expr. Interpret the contents of
7546 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7547 If the buffer cannot be interpreted, return NULL_TREE. */
7548
7549 static tree
7550 native_interpret_real (tree type, const unsigned char *ptr, int len)
7551 {
7552 enum machine_mode mode = TYPE_MODE (type);
7553 int total_bytes = GET_MODE_SIZE (mode);
7554 int byte, offset, word, words, bitpos;
7555 unsigned char value;
7556 /* There are always 32 bits in each long, no matter the size of
7557 the host's long. We handle floating point representations with
7558 up to 192 bits. */
7559 REAL_VALUE_TYPE r;
7560 long tmp[6];
7561
7562 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7563 if (total_bytes > len || total_bytes > 24)
7564 return NULL_TREE;
7565 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7566
7567 memset (tmp, 0, sizeof (tmp));
7568 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7569 bitpos += BITS_PER_UNIT)
7570 {
7571 byte = (bitpos / BITS_PER_UNIT) & 3;
7572 if (UNITS_PER_WORD < 4)
7573 {
7574 word = byte / UNITS_PER_WORD;
7575 if (WORDS_BIG_ENDIAN)
7576 word = (words - 1) - word;
7577 offset = word * UNITS_PER_WORD;
7578 if (BYTES_BIG_ENDIAN)
7579 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7580 else
7581 offset += byte % UNITS_PER_WORD;
7582 }
7583 else
7584 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7585 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7586
7587 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7588 }
7589
7590 real_from_target (&r, tmp, mode);
7591 return build_real (type, r);
7592 }
7593
7594
7595 /* Subroutine of native_interpret_expr. Interpret the contents of
7596 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7597 If the buffer cannot be interpreted, return NULL_TREE. */
7598
7599 static tree
7600 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7601 {
7602 tree etype, rpart, ipart;
7603 int size;
7604
7605 etype = TREE_TYPE (type);
7606 size = GET_MODE_SIZE (TYPE_MODE (etype));
7607 if (size * 2 > len)
7608 return NULL_TREE;
7609 rpart = native_interpret_expr (etype, ptr, size);
7610 if (!rpart)
7611 return NULL_TREE;
7612 ipart = native_interpret_expr (etype, ptr+size, size);
7613 if (!ipart)
7614 return NULL_TREE;
7615 return build_complex (type, rpart, ipart);
7616 }
7617
7618
7619 /* Subroutine of native_interpret_expr. Interpret the contents of
7620 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7621 If the buffer cannot be interpreted, return NULL_TREE. */
7622
7623 static tree
7624 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7625 {
7626 tree etype, elem;
7627 int i, size, count;
7628 tree *elements;
7629
7630 etype = TREE_TYPE (type);
7631 size = GET_MODE_SIZE (TYPE_MODE (etype));
7632 count = TYPE_VECTOR_SUBPARTS (type);
7633 if (size * count > len)
7634 return NULL_TREE;
7635
7636 elements = XALLOCAVEC (tree, count);
7637 for (i = count - 1; i >= 0; i--)
7638 {
7639 elem = native_interpret_expr (etype, ptr+(i*size), size);
7640 if (!elem)
7641 return NULL_TREE;
7642 elements[i] = elem;
7643 }
7644 return build_vector (type, elements);
7645 }
7646
7647
7648 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7649 the buffer PTR of length LEN as a constant of type TYPE. For
7650 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7651 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7652 return NULL_TREE. */
7653
7654 tree
7655 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7656 {
7657 switch (TREE_CODE (type))
7658 {
7659 case INTEGER_TYPE:
7660 case ENUMERAL_TYPE:
7661 case BOOLEAN_TYPE:
7662 case POINTER_TYPE:
7663 case REFERENCE_TYPE:
7664 return native_interpret_int (type, ptr, len);
7665
7666 case REAL_TYPE:
7667 return native_interpret_real (type, ptr, len);
7668
7669 case FIXED_POINT_TYPE:
7670 return native_interpret_fixed (type, ptr, len);
7671
7672 case COMPLEX_TYPE:
7673 return native_interpret_complex (type, ptr, len);
7674
7675 case VECTOR_TYPE:
7676 return native_interpret_vector (type, ptr, len);
7677
7678 default:
7679 return NULL_TREE;
7680 }
7681 }
7682
7683 /* Returns true if we can interpret the contents of a native encoding
7684 as TYPE. */
7685
7686 static bool
7687 can_native_interpret_type_p (tree type)
7688 {
7689 switch (TREE_CODE (type))
7690 {
7691 case INTEGER_TYPE:
7692 case ENUMERAL_TYPE:
7693 case BOOLEAN_TYPE:
7694 case POINTER_TYPE:
7695 case REFERENCE_TYPE:
7696 case FIXED_POINT_TYPE:
7697 case REAL_TYPE:
7698 case COMPLEX_TYPE:
7699 case VECTOR_TYPE:
7700 return true;
7701 default:
7702 return false;
7703 }
7704 }
7705
7706 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7707 TYPE at compile-time. If we're unable to perform the conversion
7708 return NULL_TREE. */
7709
7710 static tree
7711 fold_view_convert_expr (tree type, tree expr)
7712 {
7713 /* We support up to 512-bit values (for V8DFmode). */
7714 unsigned char buffer[64];
7715 int len;
7716
7717 /* Check that the host and target are sane. */
7718 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7719 return NULL_TREE;
7720
7721 len = native_encode_expr (expr, buffer, sizeof (buffer));
7722 if (len == 0)
7723 return NULL_TREE;
7724
7725 return native_interpret_expr (type, buffer, len);
7726 }
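
/* Example of the encode/interpret round trip above (assuming IEEE
   single precision and a 32-bit int): VIEW_CONVERT_EXPR<int>(1.0f)
   encodes the REAL_CST as the bytes of 0x3f800000 and reinterprets
   them as an INTEGER_CST, folding to 1065353216 at compile time.  */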
7727
7728 /* Build an expression for the address of T. Folds away INDIRECT_REF
7729 to avoid confusing the gimplify process. */
7730
7731 tree
7732 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7733 {
7734 /* The size of the object is not relevant when talking about its address. */
7735 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7736 t = TREE_OPERAND (t, 0);
7737
7738 if (TREE_CODE (t) == INDIRECT_REF)
7739 {
7740 t = TREE_OPERAND (t, 0);
7741
7742 if (TREE_TYPE (t) != ptrtype)
7743 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7744 }
7745 else if (TREE_CODE (t) == MEM_REF
7746 && integer_zerop (TREE_OPERAND (t, 1)))
7747 return TREE_OPERAND (t, 0);
7748 else if (TREE_CODE (t) == MEM_REF
7749 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7750 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7751 TREE_OPERAND (t, 0),
7752 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7753 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7754 {
7755 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7756
7757 if (TREE_TYPE (t) != ptrtype)
7758 t = fold_convert_loc (loc, ptrtype, t);
7759 }
7760 else
7761 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7762
7763 return t;
7764 }
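
/* Illustrations of the cases handled above (hypothetical trees):

     &*p                ->  p           (INDIRECT_REF folds away)
     &MEM_REF[p, 0]     ->  p           (zero-offset MEM_REF)
     &MEM_REF[cst, off] ->  cst p+ off  (constant-base MEM_REF)  */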
7765
7766 /* Build an expression for the address of T. */
7767
7768 tree
7769 build_fold_addr_expr_loc (location_t loc, tree t)
7770 {
7771 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7772
7773 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7774 }
7775
7776 static bool vec_cst_ctor_to_array (tree, tree *);
7777
7778 /* Fold a unary expression of code CODE and type TYPE with operand
7779 OP0. Return the folded expression if folding is successful.
7780 Otherwise, return NULL_TREE. */
7781
7782 tree
7783 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7784 {
7785 tree tem;
7786 tree arg0;
7787 enum tree_code_class kind = TREE_CODE_CLASS (code);
7788
7789 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7790 && TREE_CODE_LENGTH (code) == 1);
7791
7792 arg0 = op0;
7793 if (arg0)
7794 {
7795 if (CONVERT_EXPR_CODE_P (code)
7796 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7797 {
7798 /* Don't use STRIP_NOPS, because signedness of argument type
7799 matters. */
7800 STRIP_SIGN_NOPS (arg0);
7801 }
7802 else
7803 {
7804 /* Strip any conversions that don't change the mode. This
7805 is safe for every expression, except for a comparison
7806 expression because its signedness is derived from its
7807 operands.
7808
7809 Note that this is done as an internal manipulation within
7810 the constant folder, in order to find the simplest
7811 representation of the arguments so that their form can be
7812 studied. In any case, the appropriate type conversions
7813 should be put back in the tree that will get out of the
7814 constant folder. */
7815 STRIP_NOPS (arg0);
7816 }
7817 }
7818
7819 if (TREE_CODE_CLASS (code) == tcc_unary)
7820 {
7821 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7822 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7823 fold_build1_loc (loc, code, type,
7824 fold_convert_loc (loc, TREE_TYPE (op0),
7825 TREE_OPERAND (arg0, 1))));
7826 else if (TREE_CODE (arg0) == COND_EXPR)
7827 {
7828 tree arg01 = TREE_OPERAND (arg0, 1);
7829 tree arg02 = TREE_OPERAND (arg0, 2);
7830 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7831 arg01 = fold_build1_loc (loc, code, type,
7832 fold_convert_loc (loc,
7833 TREE_TYPE (op0), arg01));
7834 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7835 arg02 = fold_build1_loc (loc, code, type,
7836 fold_convert_loc (loc,
7837 TREE_TYPE (op0), arg02));
7838 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7839 arg01, arg02);
7840
7841 /* If this was a conversion, and all we did was to move it
7842 inside the COND_EXPR, bring it back out. But leave it if
7843 it is a conversion from integer to integer and the
7844 result precision is no wider than a word since such a
7845 conversion is cheap and may be optimized away by combine,
7846 while it couldn't if it were outside the COND_EXPR. Then return
7847 so we don't get into an infinite recursion loop taking the
7848 conversion out and then back in. */
7849
7850 if ((CONVERT_EXPR_CODE_P (code)
7851 || code == NON_LVALUE_EXPR)
7852 && TREE_CODE (tem) == COND_EXPR
7853 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7854 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7855 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7856 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7857 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7858 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7859 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7860 && (INTEGRAL_TYPE_P
7861 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7862 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7863 || flag_syntax_only))
7864 tem = build1_loc (loc, code, type,
7865 build3 (COND_EXPR,
7866 TREE_TYPE (TREE_OPERAND
7867 (TREE_OPERAND (tem, 1), 0)),
7868 TREE_OPERAND (tem, 0),
7869 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7870 TREE_OPERAND (TREE_OPERAND (tem, 2),
7871 0)));
7872 return tem;
7873 }
7874 }
7875
7876 switch (code)
7877 {
7878 case PAREN_EXPR:
7879 /* Re-association barriers around constants and other re-association
7880 barriers can be removed. */
7881 if (CONSTANT_CLASS_P (op0)
7882 || TREE_CODE (op0) == PAREN_EXPR)
7883 return fold_convert_loc (loc, type, op0);
7884 return NULL_TREE;
7885
7886 CASE_CONVERT:
7887 case FLOAT_EXPR:
7888 case FIX_TRUNC_EXPR:
7889 if (TREE_TYPE (op0) == type)
7890 return op0;
7891
7892 if (COMPARISON_CLASS_P (op0))
7893 {
7894 /* If we have (type) (a CMP b) and type is an integral type, return
7895 a new expression involving the new type. Canonicalize
7896 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7897 non-integral type.
7898 Do not fold the result as that would not simplify further, also
7899 folding again results in recursion. */
7900 if (TREE_CODE (type) == BOOLEAN_TYPE)
7901 return build2_loc (loc, TREE_CODE (op0), type,
7902 TREE_OPERAND (op0, 0),
7903 TREE_OPERAND (op0, 1));
7904 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7905 && TREE_CODE (type) != VECTOR_TYPE)
7906 return build3_loc (loc, COND_EXPR, type, op0,
7907 constant_boolean_node (true, type),
7908 constant_boolean_node (false, type));
7909 }
7910
7911 /* Handle cases of two conversions in a row. */
7912 if (CONVERT_EXPR_P (op0))
7913 {
7914 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7915 tree inter_type = TREE_TYPE (op0);
7916 int inside_int = INTEGRAL_TYPE_P (inside_type);
7917 int inside_ptr = POINTER_TYPE_P (inside_type);
7918 int inside_float = FLOAT_TYPE_P (inside_type);
7919 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7920 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7921 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7922 int inter_int = INTEGRAL_TYPE_P (inter_type);
7923 int inter_ptr = POINTER_TYPE_P (inter_type);
7924 int inter_float = FLOAT_TYPE_P (inter_type);
7925 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7926 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7927 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7928 int final_int = INTEGRAL_TYPE_P (type);
7929 int final_ptr = POINTER_TYPE_P (type);
7930 int final_float = FLOAT_TYPE_P (type);
7931 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7932 unsigned int final_prec = TYPE_PRECISION (type);
7933 int final_unsignedp = TYPE_UNSIGNED (type);
7934
7935 /* In addition to the cases of two conversions in a row
7936 handled below, if we are converting something to its own
7937 type via an object of identical or wider precision, neither
7938 conversion is needed. */
7939 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7940 && (((inter_int || inter_ptr) && final_int)
7941 || (inter_float && final_float))
7942 && inter_prec >= final_prec)
7943 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7944
7945 /* Likewise, if the intermediate and initial types are either both
7946 float or both integer, we don't need the middle conversion if the
7947 former is wider than the latter and doesn't change the signedness
7948 (for integers). Avoid this if the final type is a pointer since
7949 then we sometimes need the middle conversion. Likewise if the
7950 final type has a precision not equal to the size of its mode. */
7951 if (((inter_int && inside_int)
7952 || (inter_float && inside_float)
7953 || (inter_vec && inside_vec))
7954 && inter_prec >= inside_prec
7955 && (inter_float || inter_vec
7956 || inter_unsignedp == inside_unsignedp)
7957 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7958 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7959 && ! final_ptr
7960 && (! final_vec || inter_prec == inside_prec))
7961 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7962
7963 /* If we have a sign-extension of a zero-extended value, we can
7964 replace that by a single zero-extension. Likewise if the
7965 final conversion does not change precision we can drop the
7966 intermediate conversion. */
7967 if (inside_int && inter_int && final_int
7968 && ((inside_prec < inter_prec && inter_prec < final_prec
7969 && inside_unsignedp && !inter_unsignedp)
7970 || final_prec == inter_prec))
7971 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7972
7973 /* Two conversions in a row are not needed unless:
7974 - some conversion is floating-point (overstrict for now), or
7975 - some conversion is a vector (overstrict for now), or
7976 - the intermediate type is narrower than both initial and
7977 final, or
7978 - the intermediate type and innermost type differ in signedness,
7979 and the outermost type is wider than the intermediate, or
7980 - the initial type is a pointer type and the precisions of the
7981 intermediate and final types differ, or
7982 - the final type is a pointer type and the precisions of the
7983 initial and intermediate types differ. */
7984 if (! inside_float && ! inter_float && ! final_float
7985 && ! inside_vec && ! inter_vec && ! final_vec
7986 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7987 && ! (inside_int && inter_int
7988 && inter_unsignedp != inside_unsignedp
7989 && inter_prec < final_prec)
7990 && ((inter_unsignedp && inter_prec > inside_prec)
7991 == (final_unsignedp && final_prec > inter_prec))
7992 && ! (inside_ptr && inter_prec != final_prec)
7993 && ! (final_ptr && inside_prec != inter_prec)
7994 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7995 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7996 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7997 }
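
/* Example of the rules above (hypothetical 32-bit int and 64-bit
   long): in (int) (long) i with i of type int, the intermediate
   widening preserves everything the final narrowing keeps, so the
   middle conversion is dropped and the expression folds to (int) i,
   i.e. i itself.  */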
7998
7999 /* Handle (T *)&A.B.C for A being of type T and B and C
8000 living at offset zero. This occurs frequently in
8001 C++ upcasting and then accessing the base. */
8002 if (TREE_CODE (op0) == ADDR_EXPR
8003 && POINTER_TYPE_P (type)
8004 && handled_component_p (TREE_OPERAND (op0, 0)))
8005 {
8006 HOST_WIDE_INT bitsize, bitpos;
8007 tree offset;
8008 enum machine_mode mode;
8009 int unsignedp, volatilep;
8010 tree base = TREE_OPERAND (op0, 0);
8011 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8012 &mode, &unsignedp, &volatilep, false);
8013 /* If the reference was to a (constant) zero offset, we can use
8014 the address of the base if it has the same base type
8015 as the result type and the pointer type is unqualified. */
8016 if (! offset && bitpos == 0
8017 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8018 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8019 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8020 return fold_convert_loc (loc, type,
8021 build_fold_addr_expr_loc (loc, base));
8022 }
8023
8024 if (TREE_CODE (op0) == MODIFY_EXPR
8025 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8026 /* Detect assigning a bitfield. */
8027 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8028 && DECL_BIT_FIELD
8029 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8030 {
8031 /* Don't leave an assignment inside a conversion
8032 unless assigning a bitfield. */
8033 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8034 /* First do the assignment, then return converted constant. */
8035 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8036 TREE_NO_WARNING (tem) = 1;
8037 TREE_USED (tem) = 1;
8038 return tem;
8039 }
8040
8041 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8042 constants (if x has signed type, the sign bit cannot be set
8043 in c). This folds extension into the BIT_AND_EXPR.
8044 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8045 very likely don't have maximal range for their precision and this
8046 transformation effectively doesn't preserve non-maximal ranges. */
8047 if (TREE_CODE (type) == INTEGER_TYPE
8048 && TREE_CODE (op0) == BIT_AND_EXPR
8049 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8050 {
8051 tree and_expr = op0;
8052 tree and0 = TREE_OPERAND (and_expr, 0);
8053 tree and1 = TREE_OPERAND (and_expr, 1);
8054 int change = 0;
8055
8056 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8057 || (TYPE_PRECISION (type)
8058 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8059 change = 1;
8060 else if (TYPE_PRECISION (TREE_TYPE (and1))
8061 <= HOST_BITS_PER_WIDE_INT
8062 && tree_fits_uhwi_p (and1))
8063 {
8064 unsigned HOST_WIDE_INT cst;
8065
8066 cst = tree_to_uhwi (and1);
8067 cst &= HOST_WIDE_INT_M1U
8068 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8069 change = (cst == 0);
8070 #ifdef LOAD_EXTEND_OP
8071 if (change
8072 && !flag_syntax_only
8073 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8074 == ZERO_EXTEND))
8075 {
8076 tree uns = unsigned_type_for (TREE_TYPE (and0));
8077 and0 = fold_convert_loc (loc, uns, and0);
8078 and1 = fold_convert_loc (loc, uns, and1);
8079 }
8080 #endif
8081 }
8082 if (change)
8083 {
8084 tem = force_fit_type (type, and1, 0, TREE_OVERFLOW (and1));
8085 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8086 fold_convert_loc (loc, type, and0), tem);
8087 }
8088 }
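
/* Hypothetical instance of the BIT_AND_EXPR fold above: widening
   (unsigned long) (i & 0x7f) with a signed 32-bit i is safe because
   0x7f cannot set the sign bit, so the conversion is pushed inward,
   giving (unsigned long) i & 0x7f.  */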
8089
8090 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8091 when one of the new casts will fold away. Conservatively we assume
8092 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8093 if (POINTER_TYPE_P (type)
8094 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8095 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8096 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8097 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8098 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8099 {
8100 tree arg00 = TREE_OPERAND (arg0, 0);
8101 tree arg01 = TREE_OPERAND (arg0, 1);
8102
8103 return fold_build_pointer_plus_loc
8104 (loc, fold_convert_loc (loc, type, arg00), arg01);
8105 }
8106
8107 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8108 of the same precision, and X is an integer type not narrower than
8109 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8110 if (INTEGRAL_TYPE_P (type)
8111 && TREE_CODE (op0) == BIT_NOT_EXPR
8112 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8113 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8114 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8115 {
8116 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8117 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8118 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8119 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8120 fold_convert_loc (loc, type, tem));
8121 }
8122
8123 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8124 type of X and Y (integer types only). */
8125 if (INTEGRAL_TYPE_P (type)
8126 && TREE_CODE (op0) == MULT_EXPR
8127 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8128 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8129 {
8130 /* Be careful not to introduce new overflows. */
8131 tree mult_type;
8132 if (TYPE_OVERFLOW_WRAPS (type))
8133 mult_type = type;
8134 else
8135 mult_type = unsigned_type_for (type);
8136
8137 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8138 {
8139 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8140 fold_convert_loc (loc, mult_type,
8141 TREE_OPERAND (op0, 0)),
8142 fold_convert_loc (loc, mult_type,
8143 TREE_OPERAND (op0, 1)));
8144 return fold_convert_loc (loc, type, tem);
8145 }
8146 }
8147
8148 tem = fold_convert_const (code, type, op0);
8149 return tem ? tem : NULL_TREE;
8150
8151 case ADDR_SPACE_CONVERT_EXPR:
8152 if (integer_zerop (arg0))
8153 return fold_convert_const (code, type, arg0);
8154 return NULL_TREE;
8155
8156 case FIXED_CONVERT_EXPR:
8157 tem = fold_convert_const (code, type, arg0);
8158 return tem ? tem : NULL_TREE;
8159
8160 case VIEW_CONVERT_EXPR:
8161 if (TREE_TYPE (op0) == type)
8162 return op0;
8163 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8164 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8165 type, TREE_OPERAND (op0, 0));
8166 if (TREE_CODE (op0) == MEM_REF)
8167 return fold_build2_loc (loc, MEM_REF, type,
8168 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8169
8170 /* For integral conversions with the same precision or pointer
8171 conversions use a NOP_EXPR instead. */
8172 if ((INTEGRAL_TYPE_P (type)
8173 || POINTER_TYPE_P (type))
8174 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8175 || POINTER_TYPE_P (TREE_TYPE (op0)))
8176 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8177 return fold_convert_loc (loc, type, op0);
8178
8179 /* Strip inner integral conversions that do not change the precision. */
8180 if (CONVERT_EXPR_P (op0)
8181 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8182 || POINTER_TYPE_P (TREE_TYPE (op0)))
8183 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8184 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8185 && (TYPE_PRECISION (TREE_TYPE (op0))
8186 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8187 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8188 type, TREE_OPERAND (op0, 0));
8189
8190 return fold_view_convert_expr (type, op0);
8191
8192 case NEGATE_EXPR:
8193 tem = fold_negate_expr (loc, arg0);
8194 if (tem)
8195 return fold_convert_loc (loc, type, tem);
8196 return NULL_TREE;
8197
8198 case ABS_EXPR:
8199 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8200 return fold_abs_const (arg0, type);
8201 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8202 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8203 /* Convert fabs((double)float) into (double)fabsf(float). */
8204 else if (TREE_CODE (arg0) == NOP_EXPR
8205 && TREE_CODE (type) == REAL_TYPE)
8206 {
8207 tree targ0 = strip_float_extensions (arg0);
8208 if (targ0 != arg0)
8209 return fold_convert_loc (loc, type,
8210 fold_build1_loc (loc, ABS_EXPR,
8211 TREE_TYPE (targ0),
8212 targ0));
8213 }
8214 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8215 else if (TREE_CODE (arg0) == ABS_EXPR)
8216 return arg0;
8217 else if (tree_expr_nonnegative_p (arg0))
8218 return arg0;
8219
8220 /* Strip sign ops from argument. */
8221 if (TREE_CODE (type) == REAL_TYPE)
8222 {
8223 tem = fold_strip_sign_ops (arg0);
8224 if (tem)
8225 return fold_build1_loc (loc, ABS_EXPR, type,
8226 fold_convert_loc (loc, type, tem));
8227 }
8228 return NULL_TREE;
8229
8230 case CONJ_EXPR:
8231 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8232 return fold_convert_loc (loc, type, arg0);
8233 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8234 {
8235 tree itype = TREE_TYPE (type);
8236 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8237 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8238 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8239 negate_expr (ipart));
8240 }
8241 if (TREE_CODE (arg0) == COMPLEX_CST)
8242 {
8243 tree itype = TREE_TYPE (type);
8244 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8245 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8246 return build_complex (type, rpart, negate_expr (ipart));
8247 }
8248 if (TREE_CODE (arg0) == CONJ_EXPR)
8249 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8250 return NULL_TREE;
8251
8252 case BIT_NOT_EXPR:
8253 if (TREE_CODE (arg0) == INTEGER_CST)
8254 return fold_not_const (arg0, type);
8255 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8256 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8257 /* Convert ~ (-A) to A - 1. */
8258 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8259 return fold_build2_loc (loc, MINUS_EXPR, type,
8260 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8261 build_int_cst (type, 1));
8262 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8263 else if (INTEGRAL_TYPE_P (type)
8264 && ((TREE_CODE (arg0) == MINUS_EXPR
8265 && integer_onep (TREE_OPERAND (arg0, 1)))
8266 || (TREE_CODE (arg0) == PLUS_EXPR
8267 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8268 return fold_build1_loc (loc, NEGATE_EXPR, type,
8269 fold_convert_loc (loc, type,
8270 TREE_OPERAND (arg0, 0)));
8271 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8272 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8273 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8274 fold_convert_loc (loc, type,
8275 TREE_OPERAND (arg0, 0)))))
8276 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8277 fold_convert_loc (loc, type,
8278 TREE_OPERAND (arg0, 1)));
8279 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8280 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8281 fold_convert_loc (loc, type,
8282 TREE_OPERAND (arg0, 1)))))
8283 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8284 fold_convert_loc (loc, type,
8285 TREE_OPERAND (arg0, 0)), tem);
8286 /* Perform BIT_NOT_EXPR on each element individually. */
8287 else if (TREE_CODE (arg0) == VECTOR_CST)
8288 {
8289 tree *elements;
8290 tree elem;
8291 unsigned count = VECTOR_CST_NELTS (arg0), i;
8292
8293 elements = XALLOCAVEC (tree, count);
8294 for (i = 0; i < count; i++)
8295 {
8296 elem = VECTOR_CST_ELT (arg0, i);
8297 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8298 if (elem == NULL_TREE)
8299 break;
8300 elements[i] = elem;
8301 }
8302 if (i == count)
8303 return build_vector (type, elements);
8304 }
8305 else if (COMPARISON_CLASS_P (arg0)
8306 && (VECTOR_TYPE_P (type)
8307 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8308 {
8309 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8310 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8311 HONOR_NANS (TYPE_MODE (op_type)));
8312 if (subcode != ERROR_MARK)
8313 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8314 TREE_OPERAND (arg0, 1));
8315 }
8316
8317
8318 return NULL_TREE;
8319
8320 case TRUTH_NOT_EXPR:
8321 /* Note that the operand of this must be an int
8322 and its values must be 0 or 1.
8323 ("true" is a fixed value perhaps depending on the language,
8324 but we don't handle values other than 1 correctly yet.) */
8325 tem = fold_truth_not_expr (loc, arg0);
8326 if (!tem)
8327 return NULL_TREE;
8328 return fold_convert_loc (loc, type, tem);
8329
8330 case REALPART_EXPR:
8331 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8332 return fold_convert_loc (loc, type, arg0);
8333 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8334 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8335 TREE_OPERAND (arg0, 1));
8336 if (TREE_CODE (arg0) == COMPLEX_CST)
8337 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8338 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8339 {
8340 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8341 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8342 fold_build1_loc (loc, REALPART_EXPR, itype,
8343 TREE_OPERAND (arg0, 0)),
8344 fold_build1_loc (loc, REALPART_EXPR, itype,
8345 TREE_OPERAND (arg0, 1)));
8346 return fold_convert_loc (loc, type, tem);
8347 }
8348 if (TREE_CODE (arg0) == CONJ_EXPR)
8349 {
8350 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8351 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8352 TREE_OPERAND (arg0, 0));
8353 return fold_convert_loc (loc, type, tem);
8354 }
8355 if (TREE_CODE (arg0) == CALL_EXPR)
8356 {
8357 tree fn = get_callee_fndecl (arg0);
8358 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8359 switch (DECL_FUNCTION_CODE (fn))
8360 {
8361 CASE_FLT_FN (BUILT_IN_CEXPI):
8362 fn = mathfn_built_in (type, BUILT_IN_COS);
8363 if (fn)
8364 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8365 break;
8366
8367 default:
8368 break;
8369 }
8370 }
8371 return NULL_TREE;
8372
8373 case IMAGPART_EXPR:
8374 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8375 return build_zero_cst (type);
8376 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8377 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8378 TREE_OPERAND (arg0, 0));
8379 if (TREE_CODE (arg0) == COMPLEX_CST)
8380 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8381 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8382 {
8383 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8384 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8385 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8386 TREE_OPERAND (arg0, 0)),
8387 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8388 TREE_OPERAND (arg0, 1)));
8389 return fold_convert_loc (loc, type, tem);
8390 }
8391 if (TREE_CODE (arg0) == CONJ_EXPR)
8392 {
8393 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8394 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8395 return fold_convert_loc (loc, type, negate_expr (tem));
8396 }
8397 if (TREE_CODE (arg0) == CALL_EXPR)
8398 {
8399 tree fn = get_callee_fndecl (arg0);
8400 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8401 switch (DECL_FUNCTION_CODE (fn))
8402 {
8403 CASE_FLT_FN (BUILT_IN_CEXPI):
8404 fn = mathfn_built_in (type, BUILT_IN_SIN);
8405 if (fn)
8406 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8407 break;
8408
8409 default:
8410 break;
8411 }
8412 }
8413 return NULL_TREE;
8414
8415 case INDIRECT_REF:
8416 /* Fold *&X to X if X is an lvalue. */
8417 if (TREE_CODE (op0) == ADDR_EXPR)
8418 {
8419 tree op00 = TREE_OPERAND (op0, 0);
8420 if ((TREE_CODE (op00) == VAR_DECL
8421 || TREE_CODE (op00) == PARM_DECL
8422 || TREE_CODE (op00) == RESULT_DECL)
8423 && !TREE_READONLY (op00))
8424 return op00;
8425 }
8426 return NULL_TREE;
8427
8428 case VEC_UNPACK_LO_EXPR:
8429 case VEC_UNPACK_HI_EXPR:
8430 case VEC_UNPACK_FLOAT_LO_EXPR:
8431 case VEC_UNPACK_FLOAT_HI_EXPR:
8432 {
8433 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8434 tree *elts;
8435 enum tree_code subcode;
8436
8437 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8438 if (TREE_CODE (arg0) != VECTOR_CST)
8439 return NULL_TREE;
8440
8441 elts = XALLOCAVEC (tree, nelts * 2);
8442 if (!vec_cst_ctor_to_array (arg0, elts))
8443 return NULL_TREE;
8444
8445 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8446 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8447 elts += nelts;
8448
8449 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8450 subcode = NOP_EXPR;
8451 else
8452 subcode = FLOAT_EXPR;
8453
8454 for (i = 0; i < nelts; i++)
8455 {
8456 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8457 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8458 return NULL_TREE;
8459 }
8460
8461 return build_vector (type, elts);
8462 }
8463
8464 case REDUC_MIN_EXPR:
8465 case REDUC_MAX_EXPR:
8466 case REDUC_PLUS_EXPR:
8467 {
8468 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8469 tree *elts;
8470 enum tree_code subcode;
8471
8472 if (TREE_CODE (op0) != VECTOR_CST)
8473 return NULL_TREE;
8474
8475 elts = XALLOCAVEC (tree, nelts);
8476 if (!vec_cst_ctor_to_array (op0, elts))
8477 return NULL_TREE;
8478
8479 switch (code)
8480 {
8481 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8482 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8483 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8484 default: gcc_unreachable ();
8485 }
8486
8487 for (i = 1; i < nelts; i++)
8488 {
8489 elts[0] = const_binop (subcode, elts[0], elts[i]);
8490 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8491 return NULL_TREE;
8492 elts[i] = build_zero_cst (TREE_TYPE (type));
8493 }
8494
8495 return build_vector (type, elts);
8496 }
8497
8498 default:
8499 return NULL_TREE;
8500 } /* switch (code) */
8501 }
8502
8503
8504 /* If the operation was a conversion do _not_ mark a resulting constant
8505 with TREE_OVERFLOW if the original constant was not. These conversions
8506 have implementation defined behavior and retaining the TREE_OVERFLOW
8507 flag here would confuse later passes such as VRP. */
8508 tree
8509 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8510 tree type, tree op0)
8511 {
8512 tree res = fold_unary_loc (loc, code, type, op0);
8513 if (res
8514 && TREE_CODE (res) == INTEGER_CST
8515 && TREE_CODE (op0) == INTEGER_CST
8516 && CONVERT_EXPR_CODE_P (code))
8517 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8518
8519 return res;
8520 }
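
/* Sketch of the intent (hypothetical constants): if folding the
   narrowing conversion (signed char) 0x1ff marks the resulting
   INTEGER_CST with TREE_OVERFLOW, this wrapper copies the flag back
   from the operand -- which had none -- so later passes such as VRP
   do not treat the implementation-defined truncation as a real
   overflow.  */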
8521
8522 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8523 operands OP0 and OP1. LOC is the location of the resulting expression.
8524 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8525 Return the folded expression if folding is successful. Otherwise,
8526 return NULL_TREE. */
8527 static tree
8528 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8529 tree arg0, tree arg1, tree op0, tree op1)
8530 {
8531 tree tem;
8532
8533 /* We only do these simplifications if we are optimizing. */
8534 if (!optimize)
8535 return NULL_TREE;
8536
8537 /* Check for things like (A || B) && (A || C). We can convert this
8538 to A || (B && C). Note that either operator can be any of the four
8539 truth and/or operations and the transformation will still be
8540 valid. Also note that we only care about order for the
8541 ANDIF and ORIF operators. If B contains side effects, this
8542 might change the truth-value of A. */
8543 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8544 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8545 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8546 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8547 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8548 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8549 {
8550 tree a00 = TREE_OPERAND (arg0, 0);
8551 tree a01 = TREE_OPERAND (arg0, 1);
8552 tree a10 = TREE_OPERAND (arg1, 0);
8553 tree a11 = TREE_OPERAND (arg1, 1);
8554 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8555 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8556 && (code == TRUTH_AND_EXPR
8557 || code == TRUTH_OR_EXPR));
8558
8559 if (operand_equal_p (a00, a10, 0))
8560 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8561 fold_build2_loc (loc, code, type, a01, a11));
8562 else if (commutative && operand_equal_p (a00, a11, 0))
8563 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8564 fold_build2_loc (loc, code, type, a01, a10));
8565 else if (commutative && operand_equal_p (a01, a10, 0))
8566 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8567 fold_build2_loc (loc, code, type, a00, a11));
8568
8569 /* This case is tricky because we must either have commutative
8570 operators or else A10 must not have side-effects. */
8571
8572 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8573 && operand_equal_p (a01, a11, 0))
8574 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8575 fold_build2_loc (loc, code, type, a00, a10),
8576 a01);
8577 }
8578
8579 /* See if we can build a range comparison. */
8580 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8581 return tem;
8582
8583 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8584 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8585 {
8586 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8587 if (tem)
8588 return fold_build2_loc (loc, code, type, tem, arg1);
8589 }
8590
8591 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8592 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8593 {
8594 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8595 if (tem)
8596 return fold_build2_loc (loc, code, type, arg0, tem);
8597 }
8598
8599 /* Check for the possibility of merging component references. If our
8600 lhs is another similar operation, try to merge its rhs with our
8601 rhs. Then try to merge our lhs and rhs. */
8602 if (TREE_CODE (arg0) == code
8603 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8604 TREE_OPERAND (arg0, 1), arg1)))
8605 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8606
8607 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8608 return tem;
8609
8610 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8611 && (code == TRUTH_AND_EXPR
8612 || code == TRUTH_ANDIF_EXPR
8613 || code == TRUTH_OR_EXPR
8614 || code == TRUTH_ORIF_EXPR))
8615 {
8616 enum tree_code ncode, icode;
8617
8618 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8619 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8620 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8621
8622 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8623 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8624 We don't want to pack more than two leaves into a non-IF AND/OR
8625 expression.
8626 If the tree code of the left-hand operand isn't an AND/OR-IF code
8627 and isn't equal to IF-CODE, then we don't want to add the
8628 right-hand operand.  If the inner right-hand side of the left-hand
8629 operand has side effects, or isn't simple, then we can't add to it,
8630 as otherwise we might destroy the if-sequence. */
8631 if (TREE_CODE (arg0) == icode
8632 && simple_operand_p_2 (arg1)
8633 /* Needed for sequence points, to handle trapping and
8634 side effects. */
8635 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8636 {
8637 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8638 arg1);
8639 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8640 tem);
8641 }
8642 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8643 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8644 else if (TREE_CODE (arg1) == icode
8645 && simple_operand_p_2 (arg0)
8646 /* Needed for sequence points, to handle trapping and
8647 side effects. */
8648 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8649 {
8650 tem = fold_build2_loc (loc, ncode, type,
8651 arg0, TREE_OPERAND (arg1, 0));
8652 return fold_build2_loc (loc, icode, type, tem,
8653 TREE_OPERAND (arg1, 1));
8654 }
8655 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8656 into (A OR B).
8657 For sequence-point consistency, we need to check for trapping
8658 and side effects. */
8659 else if (code == icode && simple_operand_p_2 (arg0)
8660 && simple_operand_p_2 (arg1))
8661 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8662 }
8663
8664 return NULL_TREE;
8665 }
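
/* Editorial sketch (hypothetical, not part of GCC): a source-level model
   of the distribution rule handled above.  Provided b has no side
   effects, the two forms below are equivalent, and the folder rewrites
   the first into the second so that a is tested only once.  */
static int
example_truth_andor (int a, int b, int c)
{
  int before = (a || b) && (a || c);   /* (A || B) && (A || C) */
  int after = a || (b && c);           /* A || (B && C) */
  return before == after;              /* always 1 */
}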
8666
8667 /* Fold a binary expression of code CODE and type TYPE with operands
8668 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8669 Return the folded expression if folding is successful. Otherwise,
8670 return NULL_TREE. */
8671
8672 static tree
8673 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8674 {
8675 enum tree_code compl_code;
8676
8677 if (code == MIN_EXPR)
8678 compl_code = MAX_EXPR;
8679 else if (code == MAX_EXPR)
8680 compl_code = MIN_EXPR;
8681 else
8682 gcc_unreachable ();
8683
8684 /* MIN (MAX (a, b), b) == b. */
8685 if (TREE_CODE (op0) == compl_code
8686 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8687 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8688
8689 /* MIN (MAX (b, a), b) == b. */
8690 if (TREE_CODE (op0) == compl_code
8691 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8692 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8693 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8694
8695 /* MIN (a, MAX (a, b)) == a. */
8696 if (TREE_CODE (op1) == compl_code
8697 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8698 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8699 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8700
8701 /* MIN (a, MAX (b, a)) == a. */
8702 if (TREE_CODE (op1) == compl_code
8703 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8704 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8705 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8706
8707 return NULL_TREE;
8708 }
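
/* Editorial sketch (hypothetical, not part of GCC): the absorption law
   behind fold_minmax, written out for plain integers.  Whatever the
   ordering of a and b, MIN (MAX (a, b), b) is b.  */
static long
example_min_max_absorption (long a, long b)
{
  long max_ab = a > b ? a : b;             /* MAX (a, b) */
  long folded = max_ab < b ? max_ab : b;   /* MIN (MAX (a, b), b) */
  return folded;                           /* always equals b */
}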
8709
8710 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8711 by changing CODE to reduce the magnitude of constants involved in
8712 ARG0 of the comparison.
8713 Returns a canonicalized comparison tree if a simplification was
8714 possible, otherwise returns NULL_TREE.
8715 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8716 valid if signed overflow is undefined. */
8717
8718 static tree
8719 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8720 tree arg0, tree arg1,
8721 bool *strict_overflow_p)
8722 {
8723 enum tree_code code0 = TREE_CODE (arg0);
8724 tree t, cst0 = NULL_TREE;
8725 int sgn0;
8726 bool swap = false;
8727
8728 /* Match A +- CST code arg1 and CST code arg1. We can change the
8729 first form only if overflow is undefined. */
8730 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8731 /* In principle pointers also have undefined overflow behavior,
8732 but that causes problems elsewhere. */
8733 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8734 && (code0 == MINUS_EXPR
8735 || code0 == PLUS_EXPR)
8736 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8737 || code0 == INTEGER_CST))
8738 return NULL_TREE;
8739
8740 /* Identify the constant in arg0 and its sign. */
8741 if (code0 == INTEGER_CST)
8742 cst0 = arg0;
8743 else
8744 cst0 = TREE_OPERAND (arg0, 1);
8745 sgn0 = tree_int_cst_sgn (cst0);
8746
8747 /* Overflowed constants and zero will cause problems. */
8748 if (integer_zerop (cst0)
8749 || TREE_OVERFLOW (cst0))
8750 return NULL_TREE;
8751
8752 /* See if we can reduce the magnitude of the constant in
8753 arg0 by changing the comparison code. */
8754 if (code0 == INTEGER_CST)
8755 {
8756 /* CST <= arg1 -> CST-1 < arg1. */
8757 if (code == LE_EXPR && sgn0 == 1)
8758 code = LT_EXPR;
8759 /* -CST < arg1 -> -CST-1 <= arg1. */
8760 else if (code == LT_EXPR && sgn0 == -1)
8761 code = LE_EXPR;
8762 /* CST > arg1 -> CST-1 >= arg1. */
8763 else if (code == GT_EXPR && sgn0 == 1)
8764 code = GE_EXPR;
8765 /* -CST >= arg1 -> -CST-1 > arg1. */
8766 else if (code == GE_EXPR && sgn0 == -1)
8767 code = GT_EXPR;
8768 else
8769 return NULL_TREE;
8770 /* arg1 code' CST' might be more canonical. */
8771 swap = true;
8772 }
8773 else
8774 {
8775 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8776 if (code == LT_EXPR
8777 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8778 code = LE_EXPR;
8779 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8780 else if (code == GT_EXPR
8781 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8782 code = GE_EXPR;
8783 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8784 else if (code == LE_EXPR
8785 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8786 code = LT_EXPR;
8787 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8788 else if (code == GE_EXPR
8789 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8790 code = GT_EXPR;
8791 else
8792 return NULL_TREE;
8793 *strict_overflow_p = true;
8794 }
8795
8796 /* Now build the constant reduced in magnitude. But not if that
8797 would produce one outside of its type's range. */
8798 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8799 && ((sgn0 == 1
8800 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8801 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8802 || (sgn0 == -1
8803 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8804 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8805 /* We cannot swap the comparison here as that would cause us to
8806 endlessly recurse. */
8807 return NULL_TREE;
8808
8809 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8810 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8811 if (code0 != INTEGER_CST)
8812 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8813 t = fold_convert (TREE_TYPE (arg1), t);
8814
8815 /* If swapping might yield a more canonical form, do so. */
8816 if (swap)
8817 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8818 else
8819 return fold_build2_loc (loc, code, type, t, arg1);
8820 }
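
/* Editorial sketch (hypothetical, not part of GCC): the magnitude
   reductions above on concrete signed values.  Assuming signed overflow
   is undefined, so x + 10 cannot wrap, each pair of comparisons is
   equivalent and the rewritten form uses the smaller constant 9.  */
static int
example_reduce_constant_magnitude (int x, int y)
{
  int a = x + 10 > y;   /* A + CST > arg1 ...              */
  int b = x + 9 >= y;   /* ... becomes A + CST-1 >= arg1.  */
  int c = 10 <= y;      /* CST <= arg1 ...                 */
  int d = 9 < y;        /* ... becomes CST-1 < arg1 (possibly
                           swapped to put the constant last).  */
  return a == b && c == d;   /* 1 whenever x + 10 does not overflow */
}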
8821
8822 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8823 overflow further. Try to decrease the magnitude of constants involved
8824 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8825 and put sole constants at the second argument position.
8826 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8827
8828 static tree
8829 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8830 tree arg0, tree arg1)
8831 {
8832 tree t;
8833 bool strict_overflow_p;
8834 const char * const warnmsg = G_("assuming signed overflow does not occur "
8835 "when reducing constant in comparison");
8836
8837 /* Try canonicalization by simplifying arg0. */
8838 strict_overflow_p = false;
8839 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8840 &strict_overflow_p);
8841 if (t)
8842 {
8843 if (strict_overflow_p)
8844 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8845 return t;
8846 }
8847
8848 /* Try canonicalization by simplifying arg1 using the swapped
8849 comparison. */
8850 code = swap_tree_comparison (code);
8851 strict_overflow_p = false;
8852 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8853 &strict_overflow_p);
8854 if (t && strict_overflow_p)
8855 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8856 return t;
8857 }
8858
8859 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8860 space. This is used to avoid issuing overflow warnings for
8861 expressions like &p->x which cannot wrap. */
8862
8863 static bool
8864 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8865 {
8866 wide_int wi_offset, total;
8867
8868 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8869 return true;
8870
8871 if (bitpos < 0)
8872 return true;
8873
8874 int precision = TYPE_PRECISION (TREE_TYPE (base));
8875 if (offset == NULL_TREE)
8876 wi_offset = wi::zero (precision);
8877 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8878 return true;
8879 else
8880 wi_offset = offset;
8881
8882 bool overflow;
8883 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8884 total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8885 if (overflow)
8886 return true;
8887
8888 if (!wi::fits_uhwi_p (total))
8889 return true;
8890
8891 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8892 if (size <= 0)
8893 return true;
8894
8895 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8896 array. */
8897 if (TREE_CODE (base) == ADDR_EXPR)
8898 {
8899 HOST_WIDE_INT base_size;
8900
8901 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8902 if (base_size > 0 && size < base_size)
8903 size = base_size;
8904 }
8905
8906 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8907 }
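
/* Editorial sketch (hypothetical, not part of GCC): the byte arithmetic
   the predicate above performs.  For a pointer into an object of SIZE
   bytes, the access BASE + OFFSET + BITPOS cannot wrap the address space
   as long as the total byte offset stays within the object.  Assumes
   8-bit units for simplicity.  */
static int
example_offset_stays_in_object (unsigned long offset_bytes,
                                unsigned long bitpos,
                                unsigned long size)
{
  unsigned long total = offset_bytes + bitpos / 8;
  return total <= size;   /* 0 mirrors pointer_may_wrap_p returning true */
}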
8908
8909 /* Subroutine of fold_binary. This routine performs all of the
8910 transformations that are common to the equality/inequality
8911 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8912 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8913 fold_binary should call fold_binary instead. Fold a comparison with
8914 tree code CODE and type TYPE with operands OP0 and OP1. Return
8915 the folded comparison or NULL_TREE. */
8916
8917 static tree
8918 fold_comparison (location_t loc, enum tree_code code, tree type,
8919 tree op0, tree op1)
8920 {
8921 tree arg0, arg1, tem;
8922
8923 arg0 = op0;
8924 arg1 = op1;
8925
8926 STRIP_SIGN_NOPS (arg0);
8927 STRIP_SIGN_NOPS (arg1);
8928
8929 tem = fold_relational_const (code, type, arg0, arg1);
8930 if (tem != NULL_TREE)
8931 return tem;
8932
8933 /* If one arg is a real or integer constant, put it last. */
8934 if (tree_swap_operands_p (arg0, arg1, true))
8935 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8936
8937 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8938 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8939 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8940 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8941 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8942 && (TREE_CODE (arg1) == INTEGER_CST
8943 && !TREE_OVERFLOW (arg1)))
8944 {
8945 tree const1 = TREE_OPERAND (arg0, 1);
8946 tree const2 = arg1;
8947 tree variable = TREE_OPERAND (arg0, 0);
8948 tree lhs;
8949 int lhs_add;
8950 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8951
8952 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8953 TREE_TYPE (arg1), const2, const1);
8954
8955 /* If the constant operation overflowed this can be
8956 simplified as a comparison against INT_MAX/INT_MIN. */
8957 if (TREE_CODE (lhs) == INTEGER_CST
8958 && TREE_OVERFLOW (lhs))
8959 {
8960 int const1_sgn = tree_int_cst_sgn (const1);
8961 enum tree_code code2 = code;
8962
8963 /* Get the sign of the constant on the lhs if the
8964 operation were VARIABLE + CONST1. */
8965 if (TREE_CODE (arg0) == MINUS_EXPR)
8966 const1_sgn = -const1_sgn;
8967
8968 /* The sign of the constant determines if we overflowed
8969 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8970 Canonicalize to the INT_MIN overflow by swapping the comparison
8971 if necessary. */
8972 if (const1_sgn == -1)
8973 code2 = swap_tree_comparison (code);
8974
8975 /* We now can look at the canonicalized case
8976 VARIABLE + 1 CODE2 INT_MIN
8977 and decide on the result. */
8978 if (code2 == LT_EXPR
8979 || code2 == LE_EXPR
8980 || code2 == EQ_EXPR)
8981 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8982 else if (code2 == NE_EXPR
8983 || code2 == GE_EXPR
8984 || code2 == GT_EXPR)
8985 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8986 }
8987
8988 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8989 && (TREE_CODE (lhs) != INTEGER_CST
8990 || !TREE_OVERFLOW (lhs)))
8991 {
8992 if (code != EQ_EXPR && code != NE_EXPR)
8993 fold_overflow_warning ("assuming signed overflow does not occur "
8994 "when changing X +- C1 cmp C2 to "
8995 "X cmp C1 +- C2",
8996 WARN_STRICT_OVERFLOW_COMPARISON);
8997 return fold_build2_loc (loc, code, type, variable, lhs);
8998 }
8999 }
9000
9001 /* For comparisons of pointers we can decompose them to a compile-time
9002 comparison of the base objects and the offsets into the object.
9003 This requires at least one operand being an ADDR_EXPR or a
9004 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9005 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9006 && (TREE_CODE (arg0) == ADDR_EXPR
9007 || TREE_CODE (arg1) == ADDR_EXPR
9008 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9009 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9010 {
9011 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9012 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9013 enum machine_mode mode;
9014 int volatilep, unsignedp;
9015 bool indirect_base0 = false, indirect_base1 = false;
9016
9017 /* Get base and offset for the access. Strip ADDR_EXPR for
9018 get_inner_reference, but put it back by stripping INDIRECT_REF
9019 off the base object if possible. indirect_baseN will be true
9020 if baseN is not an address but refers to the object itself. */
9021 base0 = arg0;
9022 if (TREE_CODE (arg0) == ADDR_EXPR)
9023 {
9024 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9025 &bitsize, &bitpos0, &offset0, &mode,
9026 &unsignedp, &volatilep, false);
9027 if (TREE_CODE (base0) == INDIRECT_REF)
9028 base0 = TREE_OPERAND (base0, 0);
9029 else
9030 indirect_base0 = true;
9031 }
9032 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9033 {
9034 base0 = TREE_OPERAND (arg0, 0);
9035 STRIP_SIGN_NOPS (base0);
9036 if (TREE_CODE (base0) == ADDR_EXPR)
9037 {
9038 base0 = TREE_OPERAND (base0, 0);
9039 indirect_base0 = true;
9040 }
9041 offset0 = TREE_OPERAND (arg0, 1);
9042 if (tree_fits_shwi_p (offset0))
9043 {
9044 HOST_WIDE_INT off = size_low_cst (offset0);
9045 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9046 * BITS_PER_UNIT)
9047 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9048 {
9049 bitpos0 = off * BITS_PER_UNIT;
9050 offset0 = NULL_TREE;
9051 }
9052 }
9053 }
9054
9055 base1 = arg1;
9056 if (TREE_CODE (arg1) == ADDR_EXPR)
9057 {
9058 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9059 &bitsize, &bitpos1, &offset1, &mode,
9060 &unsignedp, &volatilep, false);
9061 if (TREE_CODE (base1) == INDIRECT_REF)
9062 base1 = TREE_OPERAND (base1, 0);
9063 else
9064 indirect_base1 = true;
9065 }
9066 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9067 {
9068 base1 = TREE_OPERAND (arg1, 0);
9069 STRIP_SIGN_NOPS (base1);
9070 if (TREE_CODE (base1) == ADDR_EXPR)
9071 {
9072 base1 = TREE_OPERAND (base1, 0);
9073 indirect_base1 = true;
9074 }
9075 offset1 = TREE_OPERAND (arg1, 1);
9076 if (tree_fits_shwi_p (offset1))
9077 {
9078 HOST_WIDE_INT off = size_low_cst (offset1);
9079 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9080 * BITS_PER_UNIT)
9081 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9082 {
9083 bitpos1 = off * BITS_PER_UNIT;
9084 offset1 = NULL_TREE;
9085 }
9086 }
9087 }
9088
9089 /* A local variable can never be pointed to by
9090 the default SSA name of an incoming parameter. */
9091 if ((TREE_CODE (arg0) == ADDR_EXPR
9092 && indirect_base0
9093 && TREE_CODE (base0) == VAR_DECL
9094 && auto_var_in_fn_p (base0, current_function_decl)
9095 && !indirect_base1
9096 && TREE_CODE (base1) == SSA_NAME
9097 && SSA_NAME_IS_DEFAULT_DEF (base1)
9098 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9099 || (TREE_CODE (arg1) == ADDR_EXPR
9100 && indirect_base1
9101 && TREE_CODE (base1) == VAR_DECL
9102 && auto_var_in_fn_p (base1, current_function_decl)
9103 && !indirect_base0
9104 && TREE_CODE (base0) == SSA_NAME
9105 && SSA_NAME_IS_DEFAULT_DEF (base0)
9106 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9107 {
9108 if (code == NE_EXPR)
9109 return constant_boolean_node (1, type);
9110 else if (code == EQ_EXPR)
9111 return constant_boolean_node (0, type);
9112 }
9113 /* If we have equivalent bases we might be able to simplify. */
9114 else if (indirect_base0 == indirect_base1
9115 && operand_equal_p (base0, base1, 0))
9116 {
9117 /* We can fold this expression to a constant if the non-constant
9118 offset parts are equal. */
9119 if ((offset0 == offset1
9120 || (offset0 && offset1
9121 && operand_equal_p (offset0, offset1, 0)))
9122 && (code == EQ_EXPR
9123 || code == NE_EXPR
9124 || (indirect_base0 && DECL_P (base0))
9125 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9127 {
9128 if (code != EQ_EXPR
9129 && code != NE_EXPR
9130 && bitpos0 != bitpos1
9131 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9132 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9133 fold_overflow_warning (("assuming pointer wraparound does not "
9134 "occur when comparing P +- C1 with "
9135 "P +- C2"),
9136 WARN_STRICT_OVERFLOW_CONDITIONAL);
9137
9138 switch (code)
9139 {
9140 case EQ_EXPR:
9141 return constant_boolean_node (bitpos0 == bitpos1, type);
9142 case NE_EXPR:
9143 return constant_boolean_node (bitpos0 != bitpos1, type);
9144 case LT_EXPR:
9145 return constant_boolean_node (bitpos0 < bitpos1, type);
9146 case LE_EXPR:
9147 return constant_boolean_node (bitpos0 <= bitpos1, type);
9148 case GE_EXPR:
9149 return constant_boolean_node (bitpos0 >= bitpos1, type);
9150 case GT_EXPR:
9151 return constant_boolean_node (bitpos0 > bitpos1, type);
9152 default:;
9153 }
9154 }
9155 /* We can simplify the comparison to a comparison of the variable
9156 offset parts if the constant offset parts are equal.
9157 Be careful to use signed sizetype here because otherwise we
9158 mess with array offsets in the wrong way. This is possible
9159 because pointer arithmetic is restricted to remain within an
9160 object and overflow on pointer differences is undefined as of
9161 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9162 else if (bitpos0 == bitpos1
9163 && ((code == EQ_EXPR || code == NE_EXPR)
9164 || (indirect_base0 && DECL_P (base0))
9165 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9166 {
9167 /* By converting to signed sizetype we cover middle-end pointer
9168 arithmetic which operates on unsigned pointer types of size
9169 type size and ARRAY_REF offsets which are properly sign or
9170 zero extended from their type in case it is narrower than
9171 sizetype. */
9172 if (offset0 == NULL_TREE)
9173 offset0 = build_int_cst (ssizetype, 0);
9174 else
9175 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9176 if (offset1 == NULL_TREE)
9177 offset1 = build_int_cst (ssizetype, 0);
9178 else
9179 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9180
9181 if (code != EQ_EXPR
9182 && code != NE_EXPR
9183 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9184 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9185 fold_overflow_warning (("assuming pointer wraparound does not "
9186 "occur when comparing P +- C1 with "
9187 "P +- C2"),
9188 WARN_STRICT_OVERFLOW_COMPARISON);
9189
9190 return fold_build2_loc (loc, code, type, offset0, offset1);
9191 }
9192 }
9193 /* For non-equal bases we can simplify if they are addresses
9194 of local binding decls or constants. */
9195 else if (indirect_base0 && indirect_base1
9196 /* We know that !operand_equal_p (base0, base1, 0)
9197 because the if condition was false. But make
9198 sure two decls are not the same. */
9199 && base0 != base1
9200 && TREE_CODE (arg0) == ADDR_EXPR
9201 && TREE_CODE (arg1) == ADDR_EXPR
9202 && (((TREE_CODE (base0) == VAR_DECL
9203 || TREE_CODE (base0) == PARM_DECL)
9204 && (targetm.binds_local_p (base0)
9205 || CONSTANT_CLASS_P (base1)))
9206 || CONSTANT_CLASS_P (base0))
9207 && (((TREE_CODE (base1) == VAR_DECL
9208 || TREE_CODE (base1) == PARM_DECL)
9209 && (targetm.binds_local_p (base1)
9210 || CONSTANT_CLASS_P (base0)))
9211 || CONSTANT_CLASS_P (base1)))
9212 {
9213 if (code == EQ_EXPR)
9214 return omit_two_operands_loc (loc, type, boolean_false_node,
9215 arg0, arg1);
9216 else if (code == NE_EXPR)
9217 return omit_two_operands_loc (loc, type, boolean_true_node,
9218 arg0, arg1);
9219 }
9220 /* For equal offsets we can simplify to a comparison of the
9221 base addresses. */
9222 else if (bitpos0 == bitpos1
9223 && (indirect_base0
9224 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9225 && (indirect_base1
9226 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9227 && ((offset0 == offset1)
9228 || (offset0 && offset1
9229 && operand_equal_p (offset0, offset1, 0))))
9230 {
9231 if (indirect_base0)
9232 base0 = build_fold_addr_expr_loc (loc, base0);
9233 if (indirect_base1)
9234 base1 = build_fold_addr_expr_loc (loc, base1);
9235 return fold_build2_loc (loc, code, type, base0, base1);
9236 }
9237 }
9238
9239 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9240 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9241 the resulting offset is smaller in absolute value than the
9242 original one. */
9243 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9244 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9245 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9246 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9247 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9248 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9249 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9250 {
9251 tree const1 = TREE_OPERAND (arg0, 1);
9252 tree const2 = TREE_OPERAND (arg1, 1);
9253 tree variable1 = TREE_OPERAND (arg0, 0);
9254 tree variable2 = TREE_OPERAND (arg1, 0);
9255 tree cst;
9256 const char * const warnmsg = G_("assuming signed overflow does not "
9257 "occur when combining constants around "
9258 "a comparison");
9259
9260 /* Put the constant on the side where it doesn't overflow and is
9261 of lower absolute value than before. */
9262 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9263 ? MINUS_EXPR : PLUS_EXPR,
9264 const2, const1);
9265 if (!TREE_OVERFLOW (cst)
9266 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9267 {
9268 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9269 return fold_build2_loc (loc, code, type,
9270 variable1,
9271 fold_build2_loc (loc,
9272 TREE_CODE (arg1), TREE_TYPE (arg1),
9273 variable2, cst));
9274 }
9275
9276 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9277 ? MINUS_EXPR : PLUS_EXPR,
9278 const1, const2);
9279 if (!TREE_OVERFLOW (cst)
9280 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9281 {
9282 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9283 return fold_build2_loc (loc, code, type,
9284 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9285 variable1, cst),
9286 variable2);
9287 }
9288 }
9289
9290 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9291 signed arithmetic case. That form is created by the compiler
9292 often enough for folding it to be of value. One example is in
9293 computing loop trip counts after Operator Strength Reduction. */
9294 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9295 && TREE_CODE (arg0) == MULT_EXPR
9296 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9297 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9298 && integer_zerop (arg1))
9299 {
9300 tree const1 = TREE_OPERAND (arg0, 1);
9301 tree const2 = arg1; /* zero */
9302 tree variable1 = TREE_OPERAND (arg0, 0);
9303 enum tree_code cmp_code = code;
9304
9305 /* Handle unfolded multiplication by zero. */
9306 if (integer_zerop (const1))
9307 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9308
9309 fold_overflow_warning (("assuming signed overflow does not occur when "
9310 "eliminating multiplication in comparison "
9311 "with zero"),
9312 WARN_STRICT_OVERFLOW_COMPARISON);
9313
9314 /* If const1 is negative we swap the sense of the comparison. */
9315 if (tree_int_cst_sgn (const1) < 0)
9316 cmp_code = swap_tree_comparison (cmp_code);
9317
9318 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9319 }
9320
9321 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9322 if (tem)
9323 return tem;
9324
9325 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9326 {
9327 tree targ0 = strip_float_extensions (arg0);
9328 tree targ1 = strip_float_extensions (arg1);
9329 tree newtype = TREE_TYPE (targ0);
9330
9331 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9332 newtype = TREE_TYPE (targ1);
9333
9334 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9335 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9336 return fold_build2_loc (loc, code, type,
9337 fold_convert_loc (loc, newtype, targ0),
9338 fold_convert_loc (loc, newtype, targ1));
9339
9340 /* (-a) CMP (-b) -> b CMP a */
9341 if (TREE_CODE (arg0) == NEGATE_EXPR
9342 && TREE_CODE (arg1) == NEGATE_EXPR)
9343 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9344 TREE_OPERAND (arg0, 0));
9345
9346 if (TREE_CODE (arg1) == REAL_CST)
9347 {
9348 REAL_VALUE_TYPE cst;
9349 cst = TREE_REAL_CST (arg1);
9350
9351 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9352 if (TREE_CODE (arg0) == NEGATE_EXPR)
9353 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9354 TREE_OPERAND (arg0, 0),
9355 build_real (TREE_TYPE (arg1),
9356 real_value_negate (&cst)));
9357
9358 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9359 /* a CMP (-0) -> a CMP 0 */
9360 if (REAL_VALUE_MINUS_ZERO (cst))
9361 return fold_build2_loc (loc, code, type, arg0,
9362 build_real (TREE_TYPE (arg1), dconst0));
9363
9364 /* x != NaN is always true, other ops are always false. */
9365 if (REAL_VALUE_ISNAN (cst)
9366 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9367 {
9368 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9369 return omit_one_operand_loc (loc, type, tem, arg0);
9370 }
9371
9372 /* Fold comparisons against infinity. */
9373 if (REAL_VALUE_ISINF (cst)
9374 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9375 {
9376 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9377 if (tem != NULL_TREE)
9378 return tem;
9379 }
9380 }
9381
9382 /* If this is a comparison of a real constant with a PLUS_EXPR
9383 or a MINUS_EXPR of a real constant, we can convert it into a
9384 comparison with a revised real constant as long as no overflow
9385 occurs when unsafe_math_optimizations are enabled. */
9386 if (flag_unsafe_math_optimizations
9387 && TREE_CODE (arg1) == REAL_CST
9388 && (TREE_CODE (arg0) == PLUS_EXPR
9389 || TREE_CODE (arg0) == MINUS_EXPR)
9390 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9391 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9392 ? MINUS_EXPR : PLUS_EXPR,
9393 arg1, TREE_OPERAND (arg0, 1)))
9394 && !TREE_OVERFLOW (tem))
9395 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9396
9397 /* Likewise, we can simplify a comparison of a real constant with
9398 a MINUS_EXPR whose first operand is also a real constant, i.e.
9399 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9400 floating-point types only if -fassociative-math is set. */
9401 if (flag_associative_math
9402 && TREE_CODE (arg1) == REAL_CST
9403 && TREE_CODE (arg0) == MINUS_EXPR
9404 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9405 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9406 arg1))
9407 && !TREE_OVERFLOW (tem))
9408 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9409 TREE_OPERAND (arg0, 1), tem);
9410
9411 /* Fold comparisons against built-in math functions. */
9412 if (TREE_CODE (arg1) == REAL_CST
9413 && flag_unsafe_math_optimizations
9414 && ! flag_errno_math)
9415 {
9416 enum built_in_function fcode = builtin_mathfn_code (arg0);
9417
9418 if (fcode != END_BUILTINS)
9419 {
9420 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9421 if (tem != NULL_TREE)
9422 return tem;
9423 }
9424 }
9425 }
9426
9427 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9428 && CONVERT_EXPR_P (arg0))
9429 {
9430 /* If we are widening one operand of an integer comparison,
9431 see if the other operand is similarly being widened. Perhaps we
9432 can do the comparison in the narrower type. */
9433 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9434 if (tem)
9435 return tem;
9436
9437 /* Or if we are changing signedness. */
9438 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9439 if (tem)
9440 return tem;
9441 }
9442
9443 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9444 constant, we can simplify it. */
9445 if (TREE_CODE (arg1) == INTEGER_CST
9446 && (TREE_CODE (arg0) == MIN_EXPR
9447 || TREE_CODE (arg0) == MAX_EXPR)
9448 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9449 {
9450 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9451 if (tem)
9452 return tem;
9453 }
9454
9455 /* Simplify comparison of something with itself. (For IEEE
9456 floating-point, we can only do some of these simplifications.) */
9457 if (operand_equal_p (arg0, arg1, 0))
9458 {
9459 switch (code)
9460 {
9461 case EQ_EXPR:
9462 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9463 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9464 return constant_boolean_node (1, type);
9465 break;
9466
9467 case GE_EXPR:
9468 case LE_EXPR:
9469 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9470 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9471 return constant_boolean_node (1, type);
9472 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9473
9474 case NE_EXPR:
9475 /* For NE, we can only do this simplification if integer
9476 or we don't honor IEEE floating point NaNs. */
9477 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9478 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9479 break;
9480 /* ... fall through ... */
9481 case GT_EXPR:
9482 case LT_EXPR:
9483 return constant_boolean_node (0, type);
9484 default:
9485 gcc_unreachable ();
9486 }
9487 }
9488
9489 /* If we are comparing an expression that just has comparisons
9490 of two integer values, arithmetic expressions of those comparisons,
9491 and constants, we can simplify it. There are only three cases
9492 to check: the two values can either be equal, the first can be
9493 greater, or the second can be greater. Fold the expression for
9494 those three values. Since each value must be 0 or 1, we have
9495 eight possibilities, each of which corresponds to the constant 0
9496 or 1 or one of the six possible comparisons.
9497
9498 This handles common cases like (a > b) == 0 but also handles
9499 expressions like ((x > y) - (y > x)) > 0, which supposedly
9500 occur in macroized code. */
9501
9502 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9503 {
9504 tree cval1 = 0, cval2 = 0;
9505 int save_p = 0;
9506
9507 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9508 /* Don't handle degenerate cases here; they should already
9509 have been handled anyway. */
9510 && cval1 != 0 && cval2 != 0
9511 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9512 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9513 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9514 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9515 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9516 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9517 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9518 {
9519 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9520 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9521
9522 /* We can't just pass T to eval_subst in case cval1 or cval2
9523 was the same as ARG1. */
9524
9525 tree high_result
9526 = fold_build2_loc (loc, code, type,
9527 eval_subst (loc, arg0, cval1, maxval,
9528 cval2, minval),
9529 arg1);
9530 tree equal_result
9531 = fold_build2_loc (loc, code, type,
9532 eval_subst (loc, arg0, cval1, maxval,
9533 cval2, maxval),
9534 arg1);
9535 tree low_result
9536 = fold_build2_loc (loc, code, type,
9537 eval_subst (loc, arg0, cval1, minval,
9538 cval2, maxval),
9539 arg1);
9540
9541 /* All three of these results should be 0 or 1. Confirm they are.
9542 Then use those values to select the proper code to use. */
9543
9544 if (TREE_CODE (high_result) == INTEGER_CST
9545 && TREE_CODE (equal_result) == INTEGER_CST
9546 && TREE_CODE (low_result) == INTEGER_CST)
9547 {
9548 /* Make a 3-bit mask with the high-order bit being the
9549 value for `>', the next for '=', and the low for '<'. */
9550 switch ((integer_onep (high_result) * 4)
9551 + (integer_onep (equal_result) * 2)
9552 + integer_onep (low_result))
9553 {
9554 case 0:
9555 /* Always false. */
9556 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9557 case 1:
9558 code = LT_EXPR;
9559 break;
9560 case 2:
9561 code = EQ_EXPR;
9562 break;
9563 case 3:
9564 code = LE_EXPR;
9565 break;
9566 case 4:
9567 code = GT_EXPR;
9568 break;
9569 case 5:
9570 code = NE_EXPR;
9571 break;
9572 case 6:
9573 code = GE_EXPR;
9574 break;
9575 case 7:
9576 /* Always true. */
9577 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9578 }
9579
9580 if (save_p)
9581 {
9582 tem = save_expr (build2 (code, type, cval1, cval2));
9583 SET_EXPR_LOCATION (tem, loc);
9584 return tem;
9585 }
9586 return fold_build2_loc (loc, code, type, cval1, cval2);
9587 }
9588 }
9589 }
9590
9591 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9592 into a single range test. */
9593 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9594 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9595 && TREE_CODE (arg1) == INTEGER_CST
9596 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9597 && !integer_zerop (TREE_OPERAND (arg0, 1))
9598 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9599 && !TREE_OVERFLOW (arg1))
9600 {
9601 tem = fold_div_compare (loc, code, type, arg0, arg1);
9602 if (tem != NULL_TREE)
9603 return tem;
9604 }
9605
9606 /* Fold ~X op ~Y as Y op X. */
9607 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9608 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9609 {
9610 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9611 return fold_build2_loc (loc, code, type,
9612 fold_convert_loc (loc, cmp_type,
9613 TREE_OPERAND (arg1, 0)),
9614 TREE_OPERAND (arg0, 0));
9615 }
9616
9617 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9618 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9619 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9620 {
9621 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9622 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9623 TREE_OPERAND (arg0, 0),
9624 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9625 fold_convert_loc (loc, cmp_type, arg1)));
9626 }
9627
9628 return NULL_TREE;
9629 }
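
/* Editorial sketch (hypothetical, not part of GCC): two of the rewrites
   fold_comparison performs, shown on plain C operands.  The first is
   only valid when signed overflow is undefined, so x + 3 cannot wrap;
   the second is an unconditional identity, because ~ reverses the
   ordering of values.  */
static int
example_comparison_rewrites (int x, int y)
{
  int moved = (x + 3 < 10) == (x < 7);   /* X +- C1 CMP C2 -> X CMP C2 -+ C1 */
  int nots = (~x < ~y) == (y < x);       /* ~X CMP ~Y -> Y CMP X */
  return moved && nots;                  /* 1 barring overflow in x + 3 */
}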
9630
9631
9632 /* Subroutine of fold_binary. Optimize complex multiplications of the
9633 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9634 argument EXPR represents the expression "z" of type TYPE. */
9635
9636 static tree
9637 fold_mult_zconjz (location_t loc, tree type, tree expr)
9638 {
9639 tree itype = TREE_TYPE (type);
9640 tree rpart, ipart, tem;
9641
9642 if (TREE_CODE (expr) == COMPLEX_EXPR)
9643 {
9644 rpart = TREE_OPERAND (expr, 0);
9645 ipart = TREE_OPERAND (expr, 1);
9646 }
9647 else if (TREE_CODE (expr) == COMPLEX_CST)
9648 {
9649 rpart = TREE_REALPART (expr);
9650 ipart = TREE_IMAGPART (expr);
9651 }
9652 else
9653 {
9654 expr = save_expr (expr);
9655 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9656 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9657 }
9658
9659 rpart = save_expr (rpart);
9660 ipart = save_expr (ipart);
9661 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9662 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9663 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9664 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9665 build_zero_cst (itype));
9666 }
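
/* Editorial sketch (hypothetical, not part of GCC): the identity behind
   fold_mult_zconjz on real components.  For z = a + bi,
   z * conj(z) = (a + bi)(a - bi) = a*a + b*b, with a zero imaginary
   part, which is exactly the COMPLEX_EXPR the folder builds.  */
static double
example_mult_zconjz (double a, double b)
{
  return a * a + b * b;   /* real part of z * conj(z); imaginary part is 0 */
}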
9667
9668
9669 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9670 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9671 guarantees that P and N have the same least significant log2(M) bits.
9672 N is not otherwise constrained. In particular, N is not normalized to
9673 0 <= N < M as is common. In general, the precise value of P is unknown.
9674 M is chosen as large as possible such that constant N can be determined.
9675
9676 Returns M and sets *RESIDUE to N.
9677
9678 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9679 account. This is not always possible due to PR 35705.
9680 */
9681
9682 static unsigned HOST_WIDE_INT
9683 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9684 bool allow_func_align)
9685 {
9686 enum tree_code code;
9687
9688 *residue = 0;
9689
9690 code = TREE_CODE (expr);
9691 if (code == ADDR_EXPR)
9692 {
9693 unsigned int bitalign;
9694 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9695 *residue /= BITS_PER_UNIT;
9696 return bitalign / BITS_PER_UNIT;
9697 }
9698 else if (code == POINTER_PLUS_EXPR)
9699 {
9700 tree op0, op1;
9701 unsigned HOST_WIDE_INT modulus;
9702 enum tree_code inner_code;
9703
9704 op0 = TREE_OPERAND (expr, 0);
9705 STRIP_NOPS (op0);
9706 modulus = get_pointer_modulus_and_residue (op0, residue,
9707 allow_func_align);
9708
9709 op1 = TREE_OPERAND (expr, 1);
9710 STRIP_NOPS (op1);
9711 inner_code = TREE_CODE (op1);
9712 if (inner_code == INTEGER_CST)
9713 {
9714 *residue += tree_to_hwi (op1);
9715 return modulus;
9716 }
9717 else if (inner_code == MULT_EXPR)
9718 {
9719 op1 = TREE_OPERAND (op1, 1);
9720 if (TREE_CODE (op1) == INTEGER_CST)
9721 {
9722 unsigned HOST_WIDE_INT align;
9723
9724 /* Compute the greatest power-of-2 divisor of op1. */
9725 align = tree_to_hwi (op1);
9726 align &= -align;
9727
9728 /* If align is non-zero and less than *modulus, replace
9729 *modulus with align. If align is 0, then either op1 is 0
9730 or the greatest power-of-2 divisor of op1 doesn't fit in an
9731 unsigned HOST_WIDE_INT. In either case, no additional
9732 constraint is imposed. */
9733 if (align)
9734 modulus = MIN (modulus, align);
9735
9736 return modulus;
9737 }
9738 }
9739 }
9740
9741 /* If we get here, we were unable to determine anything useful about the
9742 expression. */
9743 return 1;
9744 }
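
/* Editorial sketch (hypothetical, not part of GCC): the guarantee the
   (modulus, residue) pair expresses.  If the function reports modulus M
   and residue N for a pointer value P, then M divides P - N, i.e. P and
   N agree in their low log2(M) bits.  For instance, &buf[3] with buf
   16-byte aligned yields M = 16 and N = 3.  */
static int
example_modulus_residue_holds (unsigned long p, unsigned long m,
                               unsigned long n)
{
  return (p - n) % m == 0;   /* what the returned pair asserts about P */
}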
9745
9746 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9747 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9748
9749 static bool
9750 vec_cst_ctor_to_array (tree arg, tree *elts)
9751 {
9752 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9753
9754 if (TREE_CODE (arg) == VECTOR_CST)
9755 {
9756 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9757 elts[i] = VECTOR_CST_ELT (arg, i);
9758 }
9759 else if (TREE_CODE (arg) == CONSTRUCTOR)
9760 {
9761 constructor_elt *elt;
9762
9763 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9764 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9765 return false;
9766 else
9767 elts[i] = elt->value;
9768 }
9769 else
9770 return false;
9771 for (; i < nelts; i++)
9772 elts[i]
9773 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9774 return true;
9775 }
9776
9777 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9778 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9779 NULL_TREE otherwise. */
9780
9781 static tree
9782 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9783 {
9784 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9785 tree *elts;
9786 bool need_ctor = false;
9787
9788 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9789 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9790 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9791 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9792 return NULL_TREE;
9793
9794 elts = XALLOCAVEC (tree, nelts * 3);
9795 if (!vec_cst_ctor_to_array (arg0, elts)
9796 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9797 return NULL_TREE;
9798
9799 for (i = 0; i < nelts; i++)
9800 {
9801 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9802 need_ctor = true;
9803 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9804 }
9805
9806 if (need_ctor)
9807 {
9808 vec<constructor_elt, va_gc> *v;
9809 vec_alloc (v, nelts);
9810 for (i = 0; i < nelts; i++)
9811 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9812 return build_constructor (type, v);
9813 }
9814 else
9815 return build_vector (type, &elts[2 * nelts]);
9816 }
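
/* Editorial sketch (hypothetical, not part of GCC): the selection rule
   fold_vec_perm applies, on plain arrays.  Element I of the result comes
   from the 2*NELTS-element concatenation of ARG0 and ARG1, indexed by
   SEL[I], matching the elts[sel[i]] lookups above.  */
static void
example_vec_perm (const int *arg0, const int *arg1,
                  const unsigned char *sel, int *res, unsigned int nelts)
{
  unsigned int i;
  for (i = 0; i < nelts; i++)
    res[i] = sel[i] < nelts ? arg0[sel[i]] : arg1[sel[i] - nelts];
}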
9817
9818 /* Try to fold a pointer difference of type TYPE between two address
9819 expressions of array references AREF0 and AREF1 using location LOC. Return a
9820 simplified expression for the difference or NULL_TREE. */
9821
9822 static tree
9823 fold_addr_of_array_ref_difference (location_t loc, tree type,
9824 tree aref0, tree aref1)
9825 {
9826 tree base0 = TREE_OPERAND (aref0, 0);
9827 tree base1 = TREE_OPERAND (aref1, 0);
9828 tree base_offset = build_int_cst (type, 0);
9829
9830 /* If the bases are array references as well, recurse. If the bases
9831 are pointer indirections compute the difference of the pointers.
9832 If the bases are equal, we are set. */
9833 if ((TREE_CODE (base0) == ARRAY_REF
9834 && TREE_CODE (base1) == ARRAY_REF
9835 && (base_offset
9836 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9837 || (INDIRECT_REF_P (base0)
9838 && INDIRECT_REF_P (base1)
9839 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9840 TREE_OPERAND (base0, 0),
9841 TREE_OPERAND (base1, 0))))
9842 || operand_equal_p (base0, base1, 0))
9843 {
9844 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9845 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9846 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9847 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9848 return fold_build2_loc (loc, PLUS_EXPR, type,
9849 base_offset,
9850 fold_build2_loc (loc, MULT_EXPR, type,
9851 diff, esz));
9852 }
9853 return NULL_TREE;
9854 }
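
/* Editorial sketch (hypothetical, not part of GCC): the arithmetic the
   folder above rebuilds.  With a common base, the byte distance between
   &a[i] and &a[j] is (i - j) * sizeof (element), so the difference folds
   to base_offset + (i - j) * element_size.  */
static long
example_array_ref_difference (long i, long j, long element_size)
{
  return (i - j) * element_size;   /* byte distance between &a[i] and &a[j] */
}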
9855
9856 /* If the real or vector real constant CST of type TYPE has an exact
9857 inverse, return it, else return NULL. */
9858
9859 static tree
9860 exact_inverse (tree type, tree cst)
9861 {
9862 REAL_VALUE_TYPE r;
9863 tree unit_type, *elts;
9864 enum machine_mode mode;
9865 unsigned vec_nelts, i;
9866
9867 switch (TREE_CODE (cst))
9868 {
9869 case REAL_CST:
9870 r = TREE_REAL_CST (cst);
9871
9872 if (exact_real_inverse (TYPE_MODE (type), &r))
9873 return build_real (type, r);
9874
9875 return NULL_TREE;
9876
9877 case VECTOR_CST:
9878 vec_nelts = VECTOR_CST_NELTS (cst);
9879 elts = XALLOCAVEC (tree, vec_nelts);
9880 unit_type = TREE_TYPE (type);
9881 mode = TYPE_MODE (unit_type);
9882
9883 for (i = 0; i < vec_nelts; i++)
9884 {
9885 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9886 if (!exact_real_inverse (mode, &r))
9887 return NULL_TREE;
9888 elts[i] = build_real (unit_type, r);
9889 }
9890
9891 return build_vector (type, elts);
9892
9893 default:
9894 return NULL_TREE;
9895 }
9896 }
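
/* Editorial sketch (hypothetical, not part of GCC): what an exact
   inverse buys.  1/4.0 is exactly representable (0.25), so x / 4.0 can
   become the cheaper x * 0.25 with no rounding difference; 1/3.0 has no
   exact binary representation, so exact_inverse returns NULL_TREE for
   3.0 and the division is kept.  */
static double
example_exact_inverse_use (double x)
{
  return x * 0.25;   /* bit-for-bit identical to x / 4.0 */
}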
9897
9898 /* Mask out the tz least significant bits of X of type TYPE where
9899 tz is the number of trailing zeroes in Y. */
9900 static wide_int
9901 mask_with_tz (tree type, wide_int x, wide_int y)
9902 {
9903 int tz = wi::ctz (y);
9904 if (tz > 0)
9905 {
9906 wide_int mask;
9907
9908 mask = wi::mask (tz, true, TYPE_PRECISION (type));
9909 return mask & x;
9910 }
9911 return x;
9912 }
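
/* Editorial sketch (hypothetical, not part of GCC): mask_with_tz on
   ordinary unsigned values.  y = 24 ends in three zero bits, so the low
   three bits of x are dead in x & y; clearing them up front leaves
   (x & mask) & y equal to x & y.  */
static int
example_mask_with_tz (unsigned int x)
{
  unsigned int y = 24;                 /* binary ...11000, ctz (y) == 3 */
  unsigned int mask = ~0u << 3;        /* clears the 3 trailing bits */
  return ((x & mask) & y) == (x & y);  /* always 1 */
}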
9913
9914 /* Fold a binary expression of code CODE and type TYPE with operands
9915 OP0 and OP1. LOC is the location of the resulting expression.
9916 Return the folded expression if folding is successful. Otherwise,
9917 return NULL_TREE. */
9918
9919 tree
9920 fold_binary_loc (location_t loc,
9921 enum tree_code code, tree type, tree op0, tree op1)
9922 {
9923 enum tree_code_class kind = TREE_CODE_CLASS (code);
9924 tree arg0, arg1, tem;
9925 tree t1 = NULL_TREE;
9926 bool strict_overflow_p;
9927 unsigned int prec;
9928
9929 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9930 && TREE_CODE_LENGTH (code) == 2
9931 && op0 != NULL_TREE
9932 && op1 != NULL_TREE);
9933
9934 arg0 = op0;
9935 arg1 = op1;
9936
9937 /* Strip any conversions that don't change the mode. This is
9938 safe for every expression, except for a comparison expression
9939 because its signedness is derived from its operands. So, in
9940 the latter case, only strip conversions that don't change the
9941 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9942 preserved.
9943
9944 Note that this is done as an internal manipulation within the
9945 constant folder, in order to find the simplest representation
9946 of the arguments so that their form can be studied. In any
9947 case, the appropriate type conversions should be put back in
9948 the tree that will get out of the constant folder. */
9949
9950 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9951 {
9952 STRIP_SIGN_NOPS (arg0);
9953 STRIP_SIGN_NOPS (arg1);
9954 }
9955 else
9956 {
9957 STRIP_NOPS (arg0);
9958 STRIP_NOPS (arg1);
9959 }
9960
9961 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9962 constant but we can't do arithmetic on them. */
9963 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9964 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9965 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9966 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9967 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9968 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9969 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9970 {
9971 if (kind == tcc_binary)
9972 {
9973 /* Make sure type and arg0 have the same saturating flag. */
9974 gcc_assert (TYPE_SATURATING (type)
9975 == TYPE_SATURATING (TREE_TYPE (arg0)));
9976 tem = const_binop (code, arg0, arg1);
9977 }
9978 else if (kind == tcc_comparison)
9979 tem = fold_relational_const (code, type, arg0, arg1);
9980 else
9981 tem = NULL_TREE;
9982
9983 if (tem != NULL_TREE)
9984 {
9985 if (TREE_TYPE (tem) != type)
9986 tem = fold_convert_loc (loc, type, tem);
9987 return tem;
9988 }
9989 }
9990
9991 /* If this is a commutative operation, and ARG0 is a constant, move it
9992 to ARG1 to reduce the number of tests below. */
9993 if (commutative_tree_code (code)
9994 && tree_swap_operands_p (arg0, arg1, true))
9995 return fold_build2_loc (loc, code, type, op1, op0);
9996
9997 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9998
9999 First check for cases where an arithmetic operation is applied to a
10000 compound, conditional, or comparison operation. Push the arithmetic
10001 operation inside the compound or conditional to see if any folding
10002 can then be done. Convert comparison to conditional for this purpose.
10003 This also optimizes non-constant cases that used to be done in
10004 expand_expr.
10005
10006 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10007 one of the operands is a comparison and the other is a comparison, a
10008 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10009 code below would make the expression more complex. Change it to a
10010 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10011 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10012
10013 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10014 || code == EQ_EXPR || code == NE_EXPR)
10015 && TREE_CODE (type) != VECTOR_TYPE
10016 && ((truth_value_p (TREE_CODE (arg0))
10017 && (truth_value_p (TREE_CODE (arg1))
10018 || (TREE_CODE (arg1) == BIT_AND_EXPR
10019 && integer_onep (TREE_OPERAND (arg1, 1)))))
10020 || (truth_value_p (TREE_CODE (arg1))
10021 && (truth_value_p (TREE_CODE (arg0))
10022 || (TREE_CODE (arg0) == BIT_AND_EXPR
10023 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10024 {
10025 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10026 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10027 : TRUTH_XOR_EXPR,
10028 boolean_type_node,
10029 fold_convert_loc (loc, boolean_type_node, arg0),
10030 fold_convert_loc (loc, boolean_type_node, arg1));
10031
10032 if (code == EQ_EXPR)
10033 tem = invert_truthvalue_loc (loc, tem);
10034
10035 return fold_convert_loc (loc, type, tem);
10036 }
10037
10038 if (TREE_CODE_CLASS (code) == tcc_binary
10039 || TREE_CODE_CLASS (code) == tcc_comparison)
10040 {
10041 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10042 {
10043 tem = fold_build2_loc (loc, code, type,
10044 fold_convert_loc (loc, TREE_TYPE (op0),
10045 TREE_OPERAND (arg0, 1)), op1);
10046 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10047 tem);
10048 }
10049 if (TREE_CODE (arg1) == COMPOUND_EXPR
10050 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10051 {
10052 tem = fold_build2_loc (loc, code, type, op0,
10053 fold_convert_loc (loc, TREE_TYPE (op1),
10054 TREE_OPERAND (arg1, 1)));
10055 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10056 tem);
10057 }
10058
10059 if (TREE_CODE (arg0) == COND_EXPR
10060 || TREE_CODE (arg0) == VEC_COND_EXPR
10061 || COMPARISON_CLASS_P (arg0))
10062 {
10063 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10064 arg0, arg1,
10065 /*cond_first_p=*/1);
10066 if (tem != NULL_TREE)
10067 return tem;
10068 }
10069
10070 if (TREE_CODE (arg1) == COND_EXPR
10071 || TREE_CODE (arg1) == VEC_COND_EXPR
10072 || COMPARISON_CLASS_P (arg1))
10073 {
10074 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10075 arg1, arg0,
10076 /*cond_first_p=*/0);
10077 if (tem != NULL_TREE)
10078 return tem;
10079 }
10080 }
10081
10082 switch (code)
10083 {
10084 case MEM_REF:
10085 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10086 if (TREE_CODE (arg0) == ADDR_EXPR
10087 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10088 {
10089 tree iref = TREE_OPERAND (arg0, 0);
10090 return fold_build2 (MEM_REF, type,
10091 TREE_OPERAND (iref, 0),
10092 int_const_binop (PLUS_EXPR, arg1,
10093 TREE_OPERAND (iref, 1)));
10094 }
10095
10096 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10097 if (TREE_CODE (arg0) == ADDR_EXPR
10098 && handled_component_p (TREE_OPERAND (arg0, 0)))
10099 {
10100 tree base;
10101 HOST_WIDE_INT coffset;
10102 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10103 &coffset);
10104 if (!base)
10105 return NULL_TREE;
10106 return fold_build2 (MEM_REF, type,
10107 build_fold_addr_expr (base),
10108 int_const_binop (PLUS_EXPR, arg1,
10109 size_int (coffset)));
10110 }
10111
10112 return NULL_TREE;
10113
10114 case POINTER_PLUS_EXPR:
10115 /* 0 +p index -> (type)index */
10116 if (integer_zerop (arg0))
10117 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10118
10119 /* PTR +p 0 -> PTR */
10120 if (integer_zerop (arg1))
10121 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10122
10123 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10124 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10125 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10126 return fold_convert_loc (loc, type,
10127 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10128 fold_convert_loc (loc, sizetype,
10129 arg1),
10130 fold_convert_loc (loc, sizetype,
10131 arg0)));
10132
10133 /* (PTR +p B) +p A -> PTR +p (B + A) */
10134 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10135 {
10136 tree inner;
10137 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10138 tree arg00 = TREE_OPERAND (arg0, 0);
10139 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10140 arg01, fold_convert_loc (loc, sizetype, arg1));
10141 return fold_convert_loc (loc, type,
10142 fold_build_pointer_plus_loc (loc,
10143 arg00, inner));
10144 }
10145
10146 /* PTR_CST +p CST -> CST1 */
10147 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10148 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10149 fold_convert_loc (loc, type, arg1));
10150
10151 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10152 of the array. The loop optimizer sometimes produces this type of
10153 expression. */
10154 if (TREE_CODE (arg0) == ADDR_EXPR)
10155 {
10156 tem = try_move_mult_to_index (loc, arg0,
10157 fold_convert_loc (loc,
10158 ssizetype, arg1));
10159 if (tem)
10160 return fold_convert_loc (loc, type, tem);
10161 }
10162
10163 return NULL_TREE;
10164
10165 case PLUS_EXPR:
10166 /* A + (-B) -> A - B */
10167 if (TREE_CODE (arg1) == NEGATE_EXPR)
10168 return fold_build2_loc (loc, MINUS_EXPR, type,
10169 fold_convert_loc (loc, type, arg0),
10170 fold_convert_loc (loc, type,
10171 TREE_OPERAND (arg1, 0)));
10172 /* (-A) + B -> B - A */
10173 if (TREE_CODE (arg0) == NEGATE_EXPR
10174 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10175 return fold_build2_loc (loc, MINUS_EXPR, type,
10176 fold_convert_loc (loc, type, arg1),
10177 fold_convert_loc (loc, type,
10178 TREE_OPERAND (arg0, 0)));
10179
10180 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10181 {
10182 /* Convert ~A + 1 to -A. */
10183 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10184 && integer_onep (arg1))
10185 return fold_build1_loc (loc, NEGATE_EXPR, type,
10186 fold_convert_loc (loc, type,
10187 TREE_OPERAND (arg0, 0)));
10188
10189 /* ~X + X is -1. */
10190 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10191 && !TYPE_OVERFLOW_TRAPS (type))
10192 {
10193 tree tem = TREE_OPERAND (arg0, 0);
10194
10195 STRIP_NOPS (tem);
10196 if (operand_equal_p (tem, arg1, 0))
10197 {
10198 t1 = build_all_ones_cst (type);
10199 return omit_one_operand_loc (loc, type, t1, arg1);
10200 }
10201 }
10202
10203 /* X + ~X is -1. */
10204 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10205 && !TYPE_OVERFLOW_TRAPS (type))
10206 {
10207 tree tem = TREE_OPERAND (arg1, 0);
10208
10209 STRIP_NOPS (tem);
10210 if (operand_equal_p (arg0, tem, 0))
10211 {
10212 t1 = build_all_ones_cst (type);
10213 return omit_one_operand_loc (loc, type, t1, arg0);
10214 }
10215 }
10216
10217 /* X + (X / CST) * -CST is X % CST. */
10218 if (TREE_CODE (arg1) == MULT_EXPR
10219 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10220 && operand_equal_p (arg0,
10221 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10222 {
10223 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10224 tree cst1 = TREE_OPERAND (arg1, 1);
10225 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10226 cst1, cst0);
10227 if (sum && integer_zerop (sum))
10228 return fold_convert_loc (loc, type,
10229 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10230 TREE_TYPE (arg0), arg0,
10231 cst0));
10232 }
10233 }
10234
10235 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10236 one. Make sure the type is not saturating and has the signedness of
10237 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10238 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10239 if ((TREE_CODE (arg0) == MULT_EXPR
10240 || TREE_CODE (arg1) == MULT_EXPR)
10241 && !TYPE_SATURATING (type)
10242 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10243 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10244 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10245 {
10246 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10247 if (tem)
10248 return tem;
10249 }
10250
10251 if (! FLOAT_TYPE_P (type))
10252 {
10253 if (integer_zerop (arg1))
10254 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10255
10256 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10257 with a constant, and the two constants have no bits in common,
10258 we should treat this as a BIT_IOR_EXPR since this may produce more
10259 simplifications. */
10260 if (TREE_CODE (arg0) == BIT_AND_EXPR
10261 && TREE_CODE (arg1) == BIT_AND_EXPR
10262 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10263 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10264 && integer_zerop (const_binop (BIT_AND_EXPR,
10265 TREE_OPERAND (arg0, 1),
10266 TREE_OPERAND (arg1, 1))))
10267 {
10268 code = BIT_IOR_EXPR;
10269 goto bit_ior;
10270 }
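/* E.g. (illustrative): in (x & 0xf0) + (y & 0x0f) the two operands
   have no set bits in common, so no carries can occur and the
   addition is handled as (x & 0xf0) | (y & 0x0f).  */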
10271
10272 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10273 (plus (plus (mult) (mult)) (foo)) so that we can
10274 take advantage of the factoring cases below. */
10275 if (TYPE_OVERFLOW_WRAPS (type)
10276 && (((TREE_CODE (arg0) == PLUS_EXPR
10277 || TREE_CODE (arg0) == MINUS_EXPR)
10278 && TREE_CODE (arg1) == MULT_EXPR)
10279 || ((TREE_CODE (arg1) == PLUS_EXPR
10280 || TREE_CODE (arg1) == MINUS_EXPR)
10281 && TREE_CODE (arg0) == MULT_EXPR)))
10282 {
10283 tree parg0, parg1, parg, marg;
10284 enum tree_code pcode;
10285
10286 if (TREE_CODE (arg1) == MULT_EXPR)
10287 parg = arg0, marg = arg1;
10288 else
10289 parg = arg1, marg = arg0;
10290 pcode = TREE_CODE (parg);
10291 parg0 = TREE_OPERAND (parg, 0);
10292 parg1 = TREE_OPERAND (parg, 1);
10293 STRIP_NOPS (parg0);
10294 STRIP_NOPS (parg1);
10295
10296 if (TREE_CODE (parg0) == MULT_EXPR
10297 && TREE_CODE (parg1) != MULT_EXPR)
10298 return fold_build2_loc (loc, pcode, type,
10299 fold_build2_loc (loc, PLUS_EXPR, type,
10300 fold_convert_loc (loc, type,
10301 parg0),
10302 fold_convert_loc (loc, type,
10303 marg)),
10304 fold_convert_loc (loc, type, parg1));
10305 if (TREE_CODE (parg0) != MULT_EXPR
10306 && TREE_CODE (parg1) == MULT_EXPR)
10307 return
10308 fold_build2_loc (loc, PLUS_EXPR, type,
10309 fold_convert_loc (loc, type, parg0),
10310 fold_build2_loc (loc, pcode, type,
10311 fold_convert_loc (loc, type, marg),
10312 fold_convert_loc (loc, type,
10313 parg1)));
10314 }
10315 }
10316 else
10317 {
10318 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10319 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10320 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10321
10322 /* Likewise if the operands are reversed. */
10323 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10324 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10325
10326 /* Convert X + -C into X - C. */
10327 if (TREE_CODE (arg1) == REAL_CST
10328 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10329 {
10330 tem = fold_negate_const (arg1, type);
10331 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10332 return fold_build2_loc (loc, MINUS_EXPR, type,
10333 fold_convert_loc (loc, type, arg0),
10334 fold_convert_loc (loc, type, tem));
10335 }
10336
10337 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10338 to __complex__ ( x, y ). This is not the same for SNaNs or
10339 if signed zeros are involved. */
10340 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10341 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10342 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10343 {
10344 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10345 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10346 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10347 bool arg0rz = false, arg0iz = false;
10348 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10349 || (arg0i && (arg0iz = real_zerop (arg0i))))
10350 {
10351 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10352 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10353 if (arg0rz && arg1i && real_zerop (arg1i))
10354 {
10355 tree rp = arg1r ? arg1r
10356 : build1 (REALPART_EXPR, rtype, arg1);
10357 tree ip = arg0i ? arg0i
10358 : build1 (IMAGPART_EXPR, rtype, arg0);
10359 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10360 }
10361 else if (arg0iz && arg1r && real_zerop (arg1r))
10362 {
10363 tree rp = arg0r ? arg0r
10364 : build1 (REALPART_EXPR, rtype, arg0);
10365 tree ip = arg1i ? arg1i
10366 : build1 (IMAGPART_EXPR, rtype, arg1);
10367 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10368 }
10369 }
10370 }
10371
10372 if (flag_unsafe_math_optimizations
10373 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10374 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10375 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10376 return tem;
10377
10378 /* Convert x+x into x*2.0. */
10379 if (operand_equal_p (arg0, arg1, 0)
10380 && SCALAR_FLOAT_TYPE_P (type))
10381 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10382 build_real (type, dconst2));
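/* Note (illustrative): this is exact in binary floating point; for a
   double x, x + x and x * 2.0 always produce the same value.  */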
10383
10384 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10385 We associate floats only if the user has specified
10386 -fassociative-math. */
10387 if (flag_associative_math
10388 && TREE_CODE (arg1) == PLUS_EXPR
10389 && TREE_CODE (arg0) != MULT_EXPR)
10390 {
10391 tree tree10 = TREE_OPERAND (arg1, 0);
10392 tree tree11 = TREE_OPERAND (arg1, 1);
10393 if (TREE_CODE (tree11) == MULT_EXPR
10394 && TREE_CODE (tree10) == MULT_EXPR)
10395 {
10396 tree tree0;
10397 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10398 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10399 }
10400 }
10401 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10402 We associate floats only if the user has specified
10403 -fassociative-math. */
10404 if (flag_associative_math
10405 && TREE_CODE (arg0) == PLUS_EXPR
10406 && TREE_CODE (arg1) != MULT_EXPR)
10407 {
10408 tree tree00 = TREE_OPERAND (arg0, 0);
10409 tree tree01 = TREE_OPERAND (arg0, 1);
10410 if (TREE_CODE (tree01) == MULT_EXPR
10411 && TREE_CODE (tree00) == MULT_EXPR)
10412 {
10413 tree tree0;
10414 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10415 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10416 }
10417 }
10418 }
10419
10420 bit_rotate:
10421 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10422 is a rotate of A by C1 bits. */
10423 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10424 is a rotate of A by B bits. */
10425 {
10426 enum tree_code code0, code1;
10427 tree rtype;
10428 code0 = TREE_CODE (arg0);
10429 code1 = TREE_CODE (arg1);
10430 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10431 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10432 && operand_equal_p (TREE_OPERAND (arg0, 0),
10433 TREE_OPERAND (arg1, 0), 0)
10434 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10435 TYPE_UNSIGNED (rtype))
10436 /* Only create rotates in complete modes. Other cases are not
10437 expanded properly. */
10438 && (element_precision (rtype)
10439 == element_precision (TYPE_MODE (rtype))))
10440 {
10441 tree tree01, tree11;
10442 enum tree_code code01, code11;
10443
10444 tree01 = TREE_OPERAND (arg0, 1);
10445 tree11 = TREE_OPERAND (arg1, 1);
10446 STRIP_NOPS (tree01);
10447 STRIP_NOPS (tree11);
10448 code01 = TREE_CODE (tree01);
10449 code11 = TREE_CODE (tree11);
10450 if (code01 == INTEGER_CST
10451 && code11 == INTEGER_CST
10452 && (wi::add (tree01, tree11)
10453 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10454 {
10455 tem = build2_loc (loc, LROTATE_EXPR,
10456 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10457 TREE_OPERAND (arg0, 0),
10458 code0 == LSHIFT_EXPR ? tree01 : tree11);
10459 return fold_convert_loc (loc, type, tem);
10460 }
10461 else if (code11 == MINUS_EXPR)
10462 {
10463 tree tree110, tree111;
10464 tree110 = TREE_OPERAND (tree11, 0);
10465 tree111 = TREE_OPERAND (tree11, 1);
10466 STRIP_NOPS (tree110);
10467 STRIP_NOPS (tree111);
10468 if (TREE_CODE (tree110) == INTEGER_CST
10469 && 0 == compare_tree_int (tree110,
10470 element_precision
10471 (TREE_TYPE (TREE_OPERAND
10472 (arg0, 0))))
10473 && operand_equal_p (tree01, tree111, 0))
10474 return
10475 fold_convert_loc (loc, type,
10476 build2 ((code0 == LSHIFT_EXPR
10477 ? LROTATE_EXPR
10478 : RROTATE_EXPR),
10479 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10480 TREE_OPERAND (arg0, 0), tree01));
10481 }
10482 else if (code01 == MINUS_EXPR)
10483 {
10484 tree tree010, tree011;
10485 tree010 = TREE_OPERAND (tree01, 0);
10486 tree011 = TREE_OPERAND (tree01, 1);
10487 STRIP_NOPS (tree010);
10488 STRIP_NOPS (tree011);
10489 if (TREE_CODE (tree010) == INTEGER_CST
10490 && 0 == compare_tree_int (tree010,
10491 element_precision
10492 (TREE_TYPE (TREE_OPERAND
10493 (arg0, 0))))
10494 && operand_equal_p (tree11, tree011, 0))
10495 return fold_convert_loc
10496 (loc, type,
10497 build2 ((code0 != LSHIFT_EXPR
10498 ? LROTATE_EXPR
10499 : RROTATE_EXPR),
10500 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10501 TREE_OPERAND (arg0, 0), tree11));
10502 }
10503 }
10504 }
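/* Worked example (illustrative): assuming a 32-bit unsigned int x,
   (x << 3) + (x >> 29) satisfies 3 + 29 == 32 and is folded to a left
   rotate of x by 3 bits; the MINUS_EXPR cases above likewise catch
   (x << b) + (x >> (32 - b)).  */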
10505
10506 associate:
10507 /* In most languages, we can't associate operations on floats through
10508 parentheses. Rather than remember where the parentheses were, we
10509 don't associate floats at all, unless the user has specified
10510 -fassociative-math.
10511 We also need to make sure the type is not saturating. */
10512
10513 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10514 && !TYPE_SATURATING (type))
10515 {
10516 tree var0, con0, lit0, minus_lit0;
10517 tree var1, con1, lit1, minus_lit1;
10518 tree atype = type;
10519 bool ok = true;
10520
10521 /* Split both trees into variables, constants, and literals. Then
10522 associate each group together, the constants with literals,
10523 then the result with variables. This increases the chances of
10524 literals being recombined later and of generating relocatable
10525 expressions for the sum of a constant and literal. */
10526 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10527 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10528 code == MINUS_EXPR);
10529
10530 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10531 if (code == MINUS_EXPR)
10532 code = PLUS_EXPR;
10533
10534 /* With undefined overflow prefer doing association in a type
10535 which wraps on overflow, if that is one of the operand types. */
10536 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10537 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10538 {
10539 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10540 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10541 atype = TREE_TYPE (arg0);
10542 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10543 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10544 atype = TREE_TYPE (arg1);
10545 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10546 }
10547
10548 /* With undefined overflow we can only associate constants with one
10549 variable, and constants whose association doesn't overflow. */
10550 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10551 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10552 {
10553 if (var0 && var1)
10554 {
10555 tree tmp0 = var0;
10556 tree tmp1 = var1;
10557
10558 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10559 tmp0 = TREE_OPERAND (tmp0, 0);
10560 if (CONVERT_EXPR_P (tmp0)
10561 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10562 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10563 <= TYPE_PRECISION (atype)))
10564 tmp0 = TREE_OPERAND (tmp0, 0);
10565 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10566 tmp1 = TREE_OPERAND (tmp1, 0);
10567 if (CONVERT_EXPR_P (tmp1)
10568 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10569 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10570 <= TYPE_PRECISION (atype)))
10571 tmp1 = TREE_OPERAND (tmp1, 0);
10572 /* The only case we can still associate with two variables
10573 is if they are the same, modulo negation and bit-pattern
10574 preserving conversions. */
10575 if (!operand_equal_p (tmp0, tmp1, 0))
10576 ok = false;
10577 }
10578 }
10579
10580 /* Only do something if we found more than two objects. Otherwise,
10581 nothing has changed and we risk infinite recursion. */
10582 if (ok
10583 && (2 < ((var0 != 0) + (var1 != 0)
10584 + (con0 != 0) + (con1 != 0)
10585 + (lit0 != 0) + (lit1 != 0)
10586 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10587 {
10588 bool any_overflows = false;
10589 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10590 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10591 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10592 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10593 var0 = associate_trees (loc, var0, var1, code, atype);
10594 con0 = associate_trees (loc, con0, con1, code, atype);
10595 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10596 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10597 code, atype);
10598
10599 /* Preserve the MINUS_EXPR if the negative part of the literal is
10600 greater than the positive part. Otherwise, the multiplicative
10601 folding code (i.e. extract_muldiv) may be fooled when
10602 unsigned constants are subtracted, as in the following
10603 example: ((X*2 + 4) - 8U)/2. */
10604 if (minus_lit0 && lit0)
10605 {
10606 if (TREE_CODE (lit0) == INTEGER_CST
10607 && TREE_CODE (minus_lit0) == INTEGER_CST
10608 && tree_int_cst_lt (lit0, minus_lit0))
10609 {
10610 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10611 MINUS_EXPR, atype);
10612 lit0 = 0;
10613 }
10614 else
10615 {
10616 lit0 = associate_trees (loc, lit0, minus_lit0,
10617 MINUS_EXPR, atype);
10618 minus_lit0 = 0;
10619 }
10620 }
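/* In the ((X*2 + 4) - 8U)/2 example above (a sketch of the flow):
   lit0 is 4 and minus_lit0 is 8, so the literals are recombined as
   -(8 - 4) rather than as the wrapped unsigned constant 4 - 8.  */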
10621
10622 /* Don't introduce overflows through reassociation. */
10623 if (!any_overflows
10624 && ((lit0 && TREE_OVERFLOW (lit0))
10625 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10626 return NULL_TREE;
10627
10628 if (minus_lit0)
10629 {
10630 if (con0 == 0)
10631 return
10632 fold_convert_loc (loc, type,
10633 associate_trees (loc, var0, minus_lit0,
10634 MINUS_EXPR, atype));
10635 else
10636 {
10637 con0 = associate_trees (loc, con0, minus_lit0,
10638 MINUS_EXPR, atype);
10639 return
10640 fold_convert_loc (loc, type,
10641 associate_trees (loc, var0, con0,
10642 PLUS_EXPR, atype));
10643 }
10644 }
10645
10646 con0 = associate_trees (loc, con0, lit0, code, atype);
10647 return
10648 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10649 code, atype));
10650 }
10651 }
10652
10653 return NULL_TREE;
10654
10655 case MINUS_EXPR:
10656 /* Pointer simplifications for subtraction, simple reassociations. */
10657 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10658 {
10659 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10660 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10661 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10662 {
10663 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10664 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10665 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10666 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10667 return fold_build2_loc (loc, PLUS_EXPR, type,
10668 fold_build2_loc (loc, MINUS_EXPR, type,
10669 arg00, arg10),
10670 fold_build2_loc (loc, MINUS_EXPR, type,
10671 arg01, arg11));
10672 }
10673 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10674 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10675 {
10676 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10677 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10678 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10679 fold_convert_loc (loc, type, arg1));
10680 if (tmp)
10681 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10682 }
10683 }
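/* Sketch (illustrative): (p p+ 4) - (p p+ 16) becomes
   (p - p) + (4 - 16), which folds to -12; the second form handles
   (p p+ 4) - q whenever p - q itself simplifies.  */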
10684 /* A - (-B) -> A + B */
10685 if (TREE_CODE (arg1) == NEGATE_EXPR)
10686 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10687 fold_convert_loc (loc, type,
10688 TREE_OPERAND (arg1, 0)));
10689 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10690 if (TREE_CODE (arg0) == NEGATE_EXPR
10691 && negate_expr_p (arg1)
10692 && reorder_operands_p (arg0, arg1))
10693 return fold_build2_loc (loc, MINUS_EXPR, type,
10694 fold_convert_loc (loc, type,
10695 negate_expr (arg1)),
10696 fold_convert_loc (loc, type,
10697 TREE_OPERAND (arg0, 0)));
10698 /* Convert -A - 1 to ~A. */
10699 if (TREE_CODE (type) != COMPLEX_TYPE
10700 && TREE_CODE (arg0) == NEGATE_EXPR
10701 && integer_onep (arg1)
10702 && !TYPE_OVERFLOW_TRAPS (type))
10703 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10704 fold_convert_loc (loc, type,
10705 TREE_OPERAND (arg0, 0)));
10706
10707 /* Convert -1 - A to ~A. */
10708 if (TREE_CODE (type) != COMPLEX_TYPE
10709 && integer_all_onesp (arg0))
10710 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10711
10712
10713 /* X - (X / Y) * Y is X % Y. */
10714 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10715 && TREE_CODE (arg1) == MULT_EXPR
10716 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10717 && operand_equal_p (arg0,
10718 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10719 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10720 TREE_OPERAND (arg1, 1), 0))
10721 return
10722 fold_convert_loc (loc, type,
10723 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10724 arg0, TREE_OPERAND (arg1, 1)));
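/* E.g. (illustrative): x - (x / 8) * 8 folds to x % 8, the usual
   definition of the truncating remainder.  */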
10725
10726 if (! FLOAT_TYPE_P (type))
10727 {
10728 if (integer_zerop (arg0))
10729 return negate_expr (fold_convert_loc (loc, type, arg1));
10730 if (integer_zerop (arg1))
10731 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10732
10733 /* Fold A - (A & B) into ~B & A. */
10734 if (!TREE_SIDE_EFFECTS (arg0)
10735 && TREE_CODE (arg1) == BIT_AND_EXPR)
10736 {
10737 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10738 {
10739 tree arg10 = fold_convert_loc (loc, type,
10740 TREE_OPERAND (arg1, 0));
10741 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10742 fold_build1_loc (loc, BIT_NOT_EXPR,
10743 type, arg10),
10744 fold_convert_loc (loc, type, arg0));
10745 }
10746 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10747 {
10748 tree arg11 = fold_convert_loc (loc,
10749 type, TREE_OPERAND (arg1, 1));
10750 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10751 fold_build1_loc (loc, BIT_NOT_EXPR,
10752 type, arg11),
10753 fold_convert_loc (loc, type, arg0));
10754 }
10755 }
10756
10757 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10758 any power of 2 minus 1. */
10759 if (TREE_CODE (arg0) == BIT_AND_EXPR
10760 && TREE_CODE (arg1) == BIT_AND_EXPR
10761 && operand_equal_p (TREE_OPERAND (arg0, 0),
10762 TREE_OPERAND (arg1, 0), 0))
10763 {
10764 tree mask0 = TREE_OPERAND (arg0, 1);
10765 tree mask1 = TREE_OPERAND (arg1, 1);
10766 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10767
10768 if (operand_equal_p (tem, mask1, 0))
10769 {
10770 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10771 TREE_OPERAND (arg0, 0), mask1);
10772 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10773 }
10774 }
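/* Worked example (illustrative): with B == 7, a power of 2 minus 1,
   (a & ~7) - (a & 7) folds to (a ^ 7) - 7.  */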
10775 }
10776
10777 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10778 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10779 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10780
10781 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10782 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10783 (-ARG1 + ARG0) reduces to -ARG1. */
10784 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10785 return negate_expr (fold_convert_loc (loc, type, arg1));
10786
10787 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10788 __complex__ ( x, -y ). This is not the same for SNaNs or if
10789 signed zeros are involved. */
10790 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10791 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10792 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10793 {
10794 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10795 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10796 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10797 bool arg0rz = false, arg0iz = false;
10798 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10799 || (arg0i && (arg0iz = real_zerop (arg0i))))
10800 {
10801 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10802 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10803 if (arg0rz && arg1i && real_zerop (arg1i))
10804 {
10805 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10806 arg1r ? arg1r
10807 : build1 (REALPART_EXPR, rtype, arg1));
10808 tree ip = arg0i ? arg0i
10809 : build1 (IMAGPART_EXPR, rtype, arg0);
10810 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10811 }
10812 else if (arg0iz && arg1r && real_zerop (arg1r))
10813 {
10814 tree rp = arg0r ? arg0r
10815 : build1 (REALPART_EXPR, rtype, arg0);
10816 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10817 arg1i ? arg1i
10818 : build1 (IMAGPART_EXPR, rtype, arg1));
10819 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10820 }
10821 }
10822 }
10823
10824 /* Fold &x - &x. This can happen from &x.foo - &x.
10825 This is unsafe for certain floats even in non-IEEE formats.
10826 In IEEE, it is unsafe because it does wrong for NaNs.
10827 Also note that operand_equal_p is always false if an operand
10828 is volatile. */
10829
10830 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10831 && operand_equal_p (arg0, arg1, 0))
10832 return build_zero_cst (type);
10833
10834 /* A - B -> A + (-B) if B is easily negatable. */
10835 if (negate_expr_p (arg1)
10836 && ((FLOAT_TYPE_P (type)
10837 /* Avoid this transformation if B is a positive REAL_CST. */
10838 && (TREE_CODE (arg1) != REAL_CST
10839 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10840 || INTEGRAL_TYPE_P (type)))
10841 return fold_build2_loc (loc, PLUS_EXPR, type,
10842 fold_convert_loc (loc, type, arg0),
10843 fold_convert_loc (loc, type,
10844 negate_expr (arg1)));
10845
10846 /* Try folding difference of addresses. */
10847 {
10848 HOST_WIDE_INT diff;
10849
10850 if ((TREE_CODE (arg0) == ADDR_EXPR
10851 || TREE_CODE (arg1) == ADDR_EXPR)
10852 && ptr_difference_const (arg0, arg1, &diff))
10853 return build_int_cst_type (type, diff);
10854 }
10855
10856 /* Fold &a[i] - &a[j] to i-j. */
10857 if (TREE_CODE (arg0) == ADDR_EXPR
10858 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10859 && TREE_CODE (arg1) == ADDR_EXPR
10860 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10861 {
10862 tree tem = fold_addr_of_array_ref_difference (loc, type,
10863 TREE_OPERAND (arg0, 0),
10864 TREE_OPERAND (arg1, 0));
10865 if (tem)
10866 return tem;
10867 }
10868
10869 if (FLOAT_TYPE_P (type)
10870 && flag_unsafe_math_optimizations
10871 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10872 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10873 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10874 return tem;
10875
10876 /* Handle (A1 * C1) - (A2 * C2) with A1 and A2, or C1 and C2, being the
10877 same or one of them being 1. Make sure the type is not saturating and has
10878 the signedness of the stripped operands, as fold_plusminus_mult_expr will
10879 re-associate. ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10880 if ((TREE_CODE (arg0) == MULT_EXPR
10881 || TREE_CODE (arg1) == MULT_EXPR)
10882 && !TYPE_SATURATING (type)
10883 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10884 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10885 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10886 {
10887 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10888 if (tem)
10889 return tem;
10890 }
10891
10892 goto associate;
10893
10894 case MULT_EXPR:
10895 /* (-A) * (-B) -> A * B */
10896 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10897 return fold_build2_loc (loc, MULT_EXPR, type,
10898 fold_convert_loc (loc, type,
10899 TREE_OPERAND (arg0, 0)),
10900 fold_convert_loc (loc, type,
10901 negate_expr (arg1)));
10902 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10903 return fold_build2_loc (loc, MULT_EXPR, type,
10904 fold_convert_loc (loc, type,
10905 negate_expr (arg0)),
10906 fold_convert_loc (loc, type,
10907 TREE_OPERAND (arg1, 0)));
10908
10909 if (! FLOAT_TYPE_P (type))
10910 {
10911 if (integer_zerop (arg1))
10912 return omit_one_operand_loc (loc, type, arg1, arg0);
10913 if (integer_onep (arg1))
10914 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10915 /* Transform x * -1 into -x. Make sure to do the negation
10916 on the original operand with conversions not stripped
10917 because we can only strip non-sign-changing conversions. */
10918 if (integer_minus_onep (arg1))
10919 return fold_convert_loc (loc, type, negate_expr (op0));
10920 /* Transform x * -C into -x * C if x is easily negatable. */
10921 if (TREE_CODE (arg1) == INTEGER_CST
10922 && tree_int_cst_sgn (arg1) == -1
10923 && negate_expr_p (arg0)
10924 && (tem = negate_expr (arg1)) != arg1
10925 && !TREE_OVERFLOW (tem))
10926 return fold_build2_loc (loc, MULT_EXPR, type,
10927 fold_convert_loc (loc, type,
10928 negate_expr (arg0)),
10929 tem);
10930
10931 /* (a * (1 << b)) is (a << b) */
10932 if (TREE_CODE (arg1) == LSHIFT_EXPR
10933 && integer_onep (TREE_OPERAND (arg1, 0)))
10934 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10935 TREE_OPERAND (arg1, 1));
10936 if (TREE_CODE (arg0) == LSHIFT_EXPR
10937 && integer_onep (TREE_OPERAND (arg0, 0)))
10938 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10939 TREE_OPERAND (arg0, 1));
10940
10941 /* (A + A) * C -> A * 2 * C */
10942 if (TREE_CODE (arg0) == PLUS_EXPR
10943 && TREE_CODE (arg1) == INTEGER_CST
10944 && operand_equal_p (TREE_OPERAND (arg0, 0),
10945 TREE_OPERAND (arg0, 1), 0))
10946 return fold_build2_loc (loc, MULT_EXPR, type,
10947 omit_one_operand_loc (loc, type,
10948 TREE_OPERAND (arg0, 0),
10949 TREE_OPERAND (arg0, 1)),
10950 fold_build2_loc (loc, MULT_EXPR, type,
10951 build_int_cst (type, 2), arg1));
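/* E.g. (illustrative): (a + a) * 3 is rewritten as a * (2 * 3),
   and the inner constant product then folds to give a * 6.  */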
10952
10953 strict_overflow_p = false;
10954 if (TREE_CODE (arg1) == INTEGER_CST
10955 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10956 &strict_overflow_p)))
10957 {
10958 if (strict_overflow_p)
10959 fold_overflow_warning (("assuming signed overflow does not "
10960 "occur when simplifying "
10961 "multiplication"),
10962 WARN_STRICT_OVERFLOW_MISC);
10963 return fold_convert_loc (loc, type, tem);
10964 }
10965
10966 /* Optimize z * conj(z) for integer complex numbers. */
10967 if (TREE_CODE (arg0) == CONJ_EXPR
10968 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10969 return fold_mult_zconjz (loc, type, arg1);
10970 if (TREE_CODE (arg1) == CONJ_EXPR
10971 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10972 return fold_mult_zconjz (loc, type, arg0);
10973 }
10974 else
10975 {
10976 /* Maybe fold x * 0 to 0. The expressions aren't the same
10977 when x is NaN, since x * 0 is also NaN. Nor are they the
10978 same in modes with signed zeros, since multiplying a
10979 negative value by 0 gives -0, not +0. */
10980 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10981 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10982 && real_zerop (arg1))
10983 return omit_one_operand_loc (loc, type, arg1, arg0);
10984 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10985 Likewise for complex arithmetic with signed zeros. */
10986 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10987 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10988 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10989 && real_onep (arg1))
10990 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10991
10992 /* Transform x * -1.0 into -x. */
10993 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10994 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10995 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10996 && real_minus_onep (arg1))
10997 return fold_convert_loc (loc, type, negate_expr (arg0));
10998
10999 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11000 the result for floating point types due to rounding, so it is applied
11001 only if -fassociative-math was specified. */
11002 if (flag_associative_math
11003 && TREE_CODE (arg0) == RDIV_EXPR
11004 && TREE_CODE (arg1) == REAL_CST
11005 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11006 {
11007 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11008 arg1);
11009 if (tem)
11010 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11011 TREE_OPERAND (arg0, 1));
11012 }
11013
11014 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11015 if (operand_equal_p (arg0, arg1, 0))
11016 {
11017 tree tem = fold_strip_sign_ops (arg0);
11018 if (tem != NULL_TREE)
11019 {
11020 tem = fold_convert_loc (loc, type, tem);
11021 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11022 }
11023 }
11024
11025 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11026 This is not the same for NaNs or if signed zeros are
11027 involved. */
11028 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11029 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11030 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11031 && TREE_CODE (arg1) == COMPLEX_CST
11032 && real_zerop (TREE_REALPART (arg1)))
11033 {
11034 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11035 if (real_onep (TREE_IMAGPART (arg1)))
11036 return
11037 fold_build2_loc (loc, COMPLEX_EXPR, type,
11038 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11039 rtype, arg0)),
11040 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11041 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11042 return
11043 fold_build2_loc (loc, COMPLEX_EXPR, type,
11044 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11045 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11046 rtype, arg0)));
11047 }
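/* Worked example (illustrative): for z = 3.0 + 4.0i, z * I folds to
   __complex__ (-4.0, 3.0) and z * -I to __complex__ (4.0, -3.0).  */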
11048
11049 /* Optimize z * conj(z) for floating point complex numbers.
11050 Guarded by flag_unsafe_math_optimizations as non-finite
11051 imaginary components don't produce scalar results. */
11052 if (flag_unsafe_math_optimizations
11053 && TREE_CODE (arg0) == CONJ_EXPR
11054 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11055 return fold_mult_zconjz (loc, type, arg1);
11056 if (flag_unsafe_math_optimizations
11057 && TREE_CODE (arg1) == CONJ_EXPR
11058 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11059 return fold_mult_zconjz (loc, type, arg0);
11060
11061 if (flag_unsafe_math_optimizations)
11062 {
11063 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11064 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11065
11066 /* Optimizations of root(...)*root(...). */
11067 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11068 {
11069 tree rootfn, arg;
11070 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11071 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11072
11073 /* Optimize sqrt(x)*sqrt(x) as x. */
11074 if (BUILTIN_SQRT_P (fcode0)
11075 && operand_equal_p (arg00, arg10, 0)
11076 && ! HONOR_SNANS (TYPE_MODE (type)))
11077 return arg00;
11078
11079 /* Optimize root(x)*root(y) as root(x*y). */
11080 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11081 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11082 return build_call_expr_loc (loc, rootfn, 1, arg);
11083 }
11084
11085 /* Optimize expN(x)*expN(y) as expN(x+y). */
11086 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11087 {
11088 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11089 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11090 CALL_EXPR_ARG (arg0, 0),
11091 CALL_EXPR_ARG (arg1, 0));
11092 return build_call_expr_loc (loc, expfn, 1, arg);
11093 }
11094
11095 /* Optimizations of pow(...)*pow(...). */
11096 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11097 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11098 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11099 {
11100 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11101 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11102 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11103 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11104
11105 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11106 if (operand_equal_p (arg01, arg11, 0))
11107 {
11108 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11109 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11110 arg00, arg10);
11111 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11112 }
11113
11114 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11115 if (operand_equal_p (arg00, arg10, 0))
11116 {
11117 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11118 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11119 arg01, arg11);
11120 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11121 }
11122 }
11123
11124 /* Optimize tan(x)*cos(x) as sin(x). */
11125 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11126 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11127 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11128 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11129 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11130 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11131 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11132 CALL_EXPR_ARG (arg1, 0), 0))
11133 {
11134 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11135
11136 if (sinfn != NULL_TREE)
11137 return build_call_expr_loc (loc, sinfn, 1,
11138 CALL_EXPR_ARG (arg0, 0));
11139 }
11140
11141 /* Optimize x*pow(x,c) as pow(x,c+1). */
11142 if (fcode1 == BUILT_IN_POW
11143 || fcode1 == BUILT_IN_POWF
11144 || fcode1 == BUILT_IN_POWL)
11145 {
11146 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11147 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11148 if (TREE_CODE (arg11) == REAL_CST
11149 && !TREE_OVERFLOW (arg11)
11150 && operand_equal_p (arg0, arg10, 0))
11151 {
11152 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11153 REAL_VALUE_TYPE c;
11154 tree arg;
11155
11156 c = TREE_REAL_CST (arg11);
11157 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11158 arg = build_real (type, c);
11159 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11160 }
11161 }
11162
11163 /* Optimize pow(x,c)*x as pow(x,c+1). */
11164 if (fcode0 == BUILT_IN_POW
11165 || fcode0 == BUILT_IN_POWF
11166 || fcode0 == BUILT_IN_POWL)
11167 {
11168 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11169 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11170 if (TREE_CODE (arg01) == REAL_CST
11171 && !TREE_OVERFLOW (arg01)
11172 && operand_equal_p (arg1, arg00, 0))
11173 {
11174 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11175 REAL_VALUE_TYPE c;
11176 tree arg;
11177
11178 c = TREE_REAL_CST (arg01);
11179 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11180 arg = build_real (type, c);
11181 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11182 }
11183 }
11184
11185 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11186 if (!in_gimple_form
11187 && optimize
11188 && operand_equal_p (arg0, arg1, 0))
11189 {
11190 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11191
11192 if (powfn)
11193 {
11194 tree arg = build_real (type, dconst2);
11195 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11196 }
11197 }
11198 }
11199 }
11200 goto associate;
11201
11202 case BIT_IOR_EXPR:
11203 bit_ior:
11204 if (integer_all_onesp (arg1))
11205 return omit_one_operand_loc (loc, type, arg1, arg0);
11206 if (integer_zerop (arg1))
11207 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11208 if (operand_equal_p (arg0, arg1, 0))
11209 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11210
11211 /* ~X | X is -1. */
11212 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11213 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11214 {
11215 t1 = build_zero_cst (type);
11216 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11217 return omit_one_operand_loc (loc, type, t1, arg1);
11218 }
11219
11220 /* X | ~X is -1. */
11221 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11222 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11223 {
11224 t1 = build_zero_cst (type);
11225 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11226 return omit_one_operand_loc (loc, type, t1, arg0);
11227 }
11228
11229 /* Canonicalize (X & C1) | C2. */
11230 if (TREE_CODE (arg0) == BIT_AND_EXPR
11231 && TREE_CODE (arg1) == INTEGER_CST
11232 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11233 {
11234 wide_int c1, c2, c3, msk;
11235 int width = TYPE_PRECISION (type), w;
11236 bool try_simplify = true;
11237 c1 = TREE_OPERAND (arg0, 1);
11238 c2 = arg1;
11239
11240 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11241 if ((c1 & c2) == c1)
11242 return omit_one_operand_loc (loc, type, arg1,
11243 TREE_OPERAND (arg0, 0));
11244
11245 msk = wi::mask (width, false, TYPE_PRECISION (TREE_TYPE (arg1)));
11246
11247 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11248 if (msk.and_not (c1 | c2) == 0)
11249 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11250 TREE_OPERAND (arg0, 0), arg1);
11251
11252 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11253 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11254 mode which allows further optimizations. */
11255 c1 &= msk;
11256 c2 &= msk;
11257 c3 = c1.and_not (c2);
11258 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11259 {
11260 wide_int mask = wi::mask (width - w, false,
11261 TYPE_PRECISION (type));
11262 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11263 {
11264 c3 = mask;
11265 break;
11266 }
11267 }
11268
11269 /* If X is a tree of the form (Y * K1) & K2, this might conflict
11270 with the (X * CST1) & CST2 optimization in the BIT_AND_EXPR case
11271 and could end up in infinite recursion. */
11272 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
11273 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11274 == INTEGER_CST)
11275 {
11276 tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11277 wide_int masked = mask_with_tz (type, c3, t);
11278
11279 try_simplify = (masked != c1);
11280 }
11281
11282 if (try_simplify && c3 != c1)
11283 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11284 fold_build2_loc (loc, BIT_AND_EXPR, type,
11285 TREE_OPERAND (arg0, 0),
11286 wide_int_to_tree (type,
11287 c3)),
11288 arg1);
11289 }
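/* Worked examples (illustrative, 8-bit unsigned): (x & 0x0f) | 0xff
   folds to 0xff because (C1 & C2) == C1, while (x & 0xf0) | 0x0f
   folds to x | 0x0f because (C1 | C2) is all ones.  */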
11290
11291 /* (X & Y) | Y is (X, Y). */
11292 if (TREE_CODE (arg0) == BIT_AND_EXPR
11293 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11294 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11295 /* (X & Y) | X is (Y, X). */
11296 if (TREE_CODE (arg0) == BIT_AND_EXPR
11297 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11298 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11299 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11300 /* X | (X & Y) is (Y, X). */
11301 if (TREE_CODE (arg1) == BIT_AND_EXPR
11302 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11303 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11304 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11305 /* X | (Y & X) is (Y, X). */
11306 if (TREE_CODE (arg1) == BIT_AND_EXPR
11307 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11308 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11309 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11310
11311 /* (X & ~Y) | (~X & Y) is X ^ Y */
11312 if (TREE_CODE (arg0) == BIT_AND_EXPR
11313 && TREE_CODE (arg1) == BIT_AND_EXPR)
11314 {
11315 tree a0, a1, l0, l1, n0, n1;
11316
11317 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11318 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11319
11320 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11321 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11322
11323 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11324 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11325
11326 if ((operand_equal_p (n0, a0, 0)
11327 && operand_equal_p (n1, a1, 0))
11328 || (operand_equal_p (n0, a1, 0)
11329 && operand_equal_p (n1, a0, 0)))
11330 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11331 }
11332
11333 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11334 if (t1 != NULL_TREE)
11335 return t1;
11336
11337 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11338
11339 This results in more efficient code for machines without a NAND
11340 instruction. Combine will canonicalize to the first form
11341 which will allow use of NAND instructions provided by the
11342 backend if they exist. */
11343 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11344 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11345 {
11346 return
11347 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11348 build2 (BIT_AND_EXPR, type,
11349 fold_convert_loc (loc, type,
11350 TREE_OPERAND (arg0, 0)),
11351 fold_convert_loc (loc, type,
11352 TREE_OPERAND (arg1, 0))));
11353 }
11354
11355 /* See if this can be simplified into a rotate first. If that
11356 is unsuccessful continue in the association code. */
11357 goto bit_rotate;
11358
11359 case BIT_XOR_EXPR:
11360 if (integer_zerop (arg1))
11361 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11362 if (integer_all_onesp (arg1))
11363 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11364 if (operand_equal_p (arg0, arg1, 0))
11365 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11366
11367 /* ~X ^ X is -1. */
11368 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11369 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11370 {
11371 t1 = build_zero_cst (type);
11372 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11373 return omit_one_operand_loc (loc, type, t1, arg1);
11374 }
11375
11376 /* X ^ ~X is -1. */
11377 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11378 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11379 {
11380 t1 = build_zero_cst (type);
11381 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11382 return omit_one_operand_loc (loc, type, t1, arg0);
11383 }
11384
11385 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11386 with a constant, and the two constants have no bits in common,
11387 we should treat this as a BIT_IOR_EXPR since this may produce more
11388 simplifications. */
11389 if (TREE_CODE (arg0) == BIT_AND_EXPR
11390 && TREE_CODE (arg1) == BIT_AND_EXPR
11391 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11392 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11393 && integer_zerop (const_binop (BIT_AND_EXPR,
11394 TREE_OPERAND (arg0, 1),
11395 TREE_OPERAND (arg1, 1))))
11396 {
11397 code = BIT_IOR_EXPR;
11398 goto bit_ior;
11399 }
11400
11401 /* (X | Y) ^ X -> Y & ~X. */
11402 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11403 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11404 {
11405 tree t2 = TREE_OPERAND (arg0, 1);
11406 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11407 arg1);
11408 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11409 fold_convert_loc (loc, type, t2),
11410 fold_convert_loc (loc, type, t1));
11411 return t1;
11412 }
11413
11414 /* (Y | X) ^ X -> Y & ~X. */
11415 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11416 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11417 {
11418 tree t2 = TREE_OPERAND (arg0, 0);
11419 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11420 arg1);
11421 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11422 fold_convert_loc (loc, type, t2),
11423 fold_convert_loc (loc, type, t1));
11424 return t1;
11425 }
11426
11427 /* X ^ (X | Y) -> Y & ~X. */
11428 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11429 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11430 {
11431 tree t2 = TREE_OPERAND (arg1, 1);
11432 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11433 arg0);
11434 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11435 fold_convert_loc (loc, type, t2),
11436 fold_convert_loc (loc, type, t1));
11437 return t1;
11438 }
11439
11440 /* X ^ (Y | X) -> Y & ~X. */
11441 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11442 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11443 {
11444 tree t2 = TREE_OPERAND (arg1, 0);
11445 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11446 arg0);
11447 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11448 fold_convert_loc (loc, type, t2),
11449 fold_convert_loc (loc, type, t1));
11450 return t1;
11451 }
11452
11453 /* Convert ~X ^ ~Y to X ^ Y. */
11454 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11455 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11456 return fold_build2_loc (loc, code, type,
11457 fold_convert_loc (loc, type,
11458 TREE_OPERAND (arg0, 0)),
11459 fold_convert_loc (loc, type,
11460 TREE_OPERAND (arg1, 0)));
11461
11462 /* Convert ~X ^ C to X ^ ~C. */
11463 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11464 && TREE_CODE (arg1) == INTEGER_CST)
11465 return fold_build2_loc (loc, code, type,
11466 fold_convert_loc (loc, type,
11467 TREE_OPERAND (arg0, 0)),
11468 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11469
11470 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11471 if (TREE_CODE (arg0) == BIT_AND_EXPR
11472 && integer_onep (TREE_OPERAND (arg0, 1))
11473 && integer_onep (arg1))
11474 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11475 build_zero_cst (TREE_TYPE (arg0)));
11476
11477 /* Fold (X & Y) ^ Y as ~X & Y. */
11478 if (TREE_CODE (arg0) == BIT_AND_EXPR
11479 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11480 {
11481 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11482 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11483 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11484 fold_convert_loc (loc, type, arg1));
11485 }
11486 /* Fold (X & Y) ^ X as ~Y & X. */
11487 if (TREE_CODE (arg0) == BIT_AND_EXPR
11488 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11489 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11490 {
11491 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11492 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11493 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11494 fold_convert_loc (loc, type, arg1));
11495 }
11496 /* Fold X ^ (X & Y) as X & ~Y. */
11497 if (TREE_CODE (arg1) == BIT_AND_EXPR
11498 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11499 {
11500 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11501 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11502 fold_convert_loc (loc, type, arg0),
11503 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11504 }
11505 /* Fold X ^ (Y & X) as ~Y & X. */
11506 if (TREE_CODE (arg1) == BIT_AND_EXPR
11507 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11508 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11509 {
11510 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11511 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11512 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11513 fold_convert_loc (loc, type, arg0));
11514 }
11515
11516 /* See if this can be simplified into a rotate first. If that
11517 is unsuccessful continue in the association code. */
11518 goto bit_rotate;
11519
11520 case BIT_AND_EXPR:
11521 if (integer_all_onesp (arg1))
11522 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11523 if (integer_zerop (arg1))
11524 return omit_one_operand_loc (loc, type, arg1, arg0);
11525 if (operand_equal_p (arg0, arg1, 0))
11526 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11527
11528 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11529 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11530 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11531 || (TREE_CODE (arg0) == EQ_EXPR
11532 && integer_zerop (TREE_OPERAND (arg0, 1))))
11533 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11534 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11535
11536 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11537 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11538 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11539 || (TREE_CODE (arg1) == EQ_EXPR
11540 && integer_zerop (TREE_OPERAND (arg1, 1))))
11541 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11542 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11543
11544 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11545 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11546 && TREE_CODE (arg1) == INTEGER_CST
11547 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11548 {
11549 tree tmp1 = fold_convert_loc (loc, type, arg1);
11550 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11551 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11552 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11553 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11554 return
11555 fold_convert_loc (loc, type,
11556 fold_build2_loc (loc, BIT_IOR_EXPR,
11557 type, tmp2, tmp3));
11558 }
11559
11560 /* (X | Y) & Y is (X, Y). */
11561 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11562 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11563 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11564 /* (X | Y) & X is (Y, X). */
11565 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11566 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11567 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11568 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11569 /* X & (X | Y) is (Y, X). */
11570 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11571 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11572 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11573 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11574 /* X & (Y | X) is (Y, X). */
11575 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11576 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11577 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11578 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11579
11580 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11581 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11582 && integer_onep (TREE_OPERAND (arg0, 1))
11583 && integer_onep (arg1))
11584 {
11585 tree tem2;
11586 tem = TREE_OPERAND (arg0, 0);
11587 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11588 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11589 tem, tem2);
11590 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11591 build_zero_cst (TREE_TYPE (tem)));
11592 }
11593 /* Fold ~X & 1 as (X & 1) == 0. */
11594 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11595 && integer_onep (arg1))
11596 {
11597 tree tem2;
11598 tem = TREE_OPERAND (arg0, 0);
11599 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11600 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11601 tem, tem2);
11602 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11603 build_zero_cst (TREE_TYPE (tem)));
11604 }
11605 /* Fold !X & 1 as X == 0. */
11606 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11607 && integer_onep (arg1))
11608 {
11609 tem = TREE_OPERAND (arg0, 0);
11610 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11611 build_zero_cst (TREE_TYPE (tem)));
11612 }
11613
11614 /* Fold (X ^ Y) & Y as ~X & Y. */
11615 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11616 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11617 {
11618 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11619 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11620 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11621 fold_convert_loc (loc, type, arg1));
11622 }
11623 /* Fold (X ^ Y) & X as ~Y & X. */
11624 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11625 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11626 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11627 {
11628 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11629 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11630 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11631 fold_convert_loc (loc, type, arg1));
11632 }
11633 /* Fold X & (X ^ Y) as X & ~Y. */
11634 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11635 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11636 {
11637 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11638 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11639 fold_convert_loc (loc, type, arg0),
11640 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11641 }
11642 /* Fold X & (Y ^ X) as ~Y & X. */
11643 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11644 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11645 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11646 {
11647 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11648 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11649 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11650 fold_convert_loc (loc, type, arg0));
11651 }
11652
11653 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11654 multiple of 1 << CST. */
11655 if (TREE_CODE (arg1) == INTEGER_CST)
11656 {
11657 wide_int cst1 = arg1;
11658 wide_int ncst1 = -cst1;
11659 if ((cst1 & ncst1) == ncst1
11660 && multiple_of_p (type, arg0,
11661 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11662 return fold_convert_loc (loc, type, arg0);
11663 }
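/* E.g. (illustrative): (x * 48) & -16 folds to x * 48, because -16
   has the form -(1 << 4) and 48 is a multiple of 16, so the mask
   cannot clear any bit of the product.  */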
11664
11665 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11666 bits from CST2. */
11667 if (TREE_CODE (arg1) == INTEGER_CST
11668 && TREE_CODE (arg0) == MULT_EXPR
11669 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11670 {
11671 wide_int masked = mask_with_tz (type, arg1, TREE_OPERAND (arg0, 1));
11672
11673 if (masked == 0)
11674 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11675 arg0, arg1);
11676 else if (masked != arg1)
11677 return fold_build2_loc (loc, code, type, op0,
11678 wide_int_to_tree (type, masked));
11679 }
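/* E.g. (illustrative): (x * 8) & 7 folds to 0, since x * 8 always has
   its three low bits clear; (x * 8) & 15 drops the known-zero bits
   and becomes (x * 8) & 8.  */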
11680
11681 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11682 ((A & N) + B) & M -> (A + B) & M
11683 Similarly if (N & M) == 0,
11684 ((A | N) + B) & M -> (A + B) & M
11685 and for - instead of + (or unary - instead of +)
11686 and/or ^ instead of |.
11687 If B is constant and (B & M) == 0, fold into A & M. */
11688 if (TREE_CODE (arg1) == INTEGER_CST)
11689 {
11690 wide_int cst1 = arg1;
11691 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11692 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11693 && (TREE_CODE (arg0) == PLUS_EXPR
11694 || TREE_CODE (arg0) == MINUS_EXPR
11695 || TREE_CODE (arg0) == NEGATE_EXPR)
11696 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11697 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11698 {
11699 tree pmop[2];
11700 int which = 0;
11701 wide_int cst0;
11702
11703 /* Now we know that arg0 is (C + D) or (C - D) or
11704 -C and arg1 (M) equals (1LL << cst) - 1.
11705 Store C into PMOP[0] and D into PMOP[1]. */
11706 pmop[0] = TREE_OPERAND (arg0, 0);
11707 pmop[1] = NULL;
11708 if (TREE_CODE (arg0) != NEGATE_EXPR)
11709 {
11710 pmop[1] = TREE_OPERAND (arg0, 1);
11711 which = 1;
11712 }
11713
11714 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11715 which = -1;
11716
11717 for (; which >= 0; which--)
11718 switch (TREE_CODE (pmop[which]))
11719 {
11720 case BIT_AND_EXPR:
11721 case BIT_IOR_EXPR:
11722 case BIT_XOR_EXPR:
11723 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11724 != INTEGER_CST)
11725 break;
11726 cst0 = TREE_OPERAND (pmop[which], 1);
11727 cst0 &= cst1;
11728 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11729 {
11730 if (cst0 != cst1)
11731 break;
11732 }
11733 else if (cst0 != 0)
11734 break;
11735 /* If C or D is of the form (A & N) where
11736 (N & M) == M, or of the form (A | N) or
11737 (A ^ N) where (N & M) == 0, replace it with A. */
11738 pmop[which] = TREE_OPERAND (pmop[which], 0);
11739 break;
11740 case INTEGER_CST:
11741 /* If C or D is a constant N where (N & M) == 0, it can be
11742 omitted (treated as 0). */
11743 if ((TREE_CODE (arg0) == PLUS_EXPR
11744 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11745 && (cst1 & pmop[which]) == 0)
11746 pmop[which] = NULL;
11747 break;
11748 default:
11749 break;
11750 }
11751
11752 /* Only build anything new if we optimized one or both arguments
11753 above. */
11754 if (pmop[0] != TREE_OPERAND (arg0, 0)
11755 || (TREE_CODE (arg0) != NEGATE_EXPR
11756 && pmop[1] != TREE_OPERAND (arg0, 1)))
11757 {
11758 tree utype = TREE_TYPE (arg0);
11759 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11760 {
11761 /* Perform the operations in a type that has defined
11762 overflow behavior. */
11763 utype = unsigned_type_for (TREE_TYPE (arg0));
11764 if (pmop[0] != NULL)
11765 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11766 if (pmop[1] != NULL)
11767 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11768 }
11769
11770 if (TREE_CODE (arg0) == NEGATE_EXPR)
11771 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11772 else if (TREE_CODE (arg0) == PLUS_EXPR)
11773 {
11774 if (pmop[0] != NULL && pmop[1] != NULL)
11775 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11776 pmop[0], pmop[1]);
11777 else if (pmop[0] != NULL)
11778 tem = pmop[0];
11779 else if (pmop[1] != NULL)
11780 tem = pmop[1];
11781 else
11782 return build_int_cst (type, 0);
11783 }
11784 else if (pmop[0] == NULL)
11785 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11786 else
11787 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11788 pmop[0], pmop[1]);
11789 /* TEM is now the new binary +, - or unary - replacement. */
11790 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11791 fold_convert_loc (loc, utype, arg1));
11792 return fold_convert_loc (loc, type, tem);
11793 }
11794 }
11795 }
11796
11797 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11798 if (t1 != NULL_TREE)
11799 return t1;
11800 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11801 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11802 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11803 {
11804 wide_int mask;
11805 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11806
11807 mask = wide_int::from (arg1, prec, UNSIGNED);
11808 if (mask == -1)
11809 return
11810 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11811 }
11812
11813 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11814
11815 This results in more efficient code for machines without a NOR
11816 instruction. Combine will canonicalize to the first form,
11817 which will allow use of NOR instructions provided by the
11818 backend if they exist. */
11819 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11820 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11821 {
11822 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11823 build2 (BIT_IOR_EXPR, type,
11824 fold_convert_loc (loc, type,
11825 TREE_OPERAND (arg0, 0)),
11826 fold_convert_loc (loc, type,
11827 TREE_OPERAND (arg1, 0))));
11828 }
11829
11830 /* If arg0 is derived from the address of an object or function, we may
11831 be able to fold this expression using the object or function's
11832 alignment. */
11833 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11834 {
11835 unsigned HOST_WIDE_INT modulus, residue;
11836 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11837
11838 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11839 integer_onep (arg1));
11840
11841 /* This works because modulus is a power of 2. If this weren't the
11842 case, we'd have to replace it by its greatest power-of-2
11843 divisor: modulus & -modulus. */
11844 if (low < modulus)
11845 return build_int_cst (type, residue & low);
11846 }
11847
11848 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11849 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11850 if the new mask might be further optimized. */
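/* Hypothetical instance: with C1 == 2 and C2 == 0xfc,
     (x << 2) & 0xfc  -->  (x << 2) & 0xff
   is valid because bits 0-1 of x << 2 are already zero, and the
   widened mask 0xff is a mode mask that may simplify further.  */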
11851 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11852 || TREE_CODE (arg0) == RSHIFT_EXPR)
11853 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11854 && tree_fits_hwi_p (arg1)
11855 && tree_to_uhwi (TREE_OPERAND (arg0, 1))
11856 < TYPE_PRECISION (TREE_TYPE (arg0))
11857 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11858 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0)
11859 {
11860 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11861 unsigned HOST_WIDE_INT mask
11862 = tree_to_hwi (arg1, TYPE_SIGN (TREE_TYPE (arg1)));
11863 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11864 tree shift_type = TREE_TYPE (arg0);
11865
11866 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11867 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11868 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11869 && TYPE_PRECISION (TREE_TYPE (arg0))
11870 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11871 {
11872 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11873 tree arg00 = TREE_OPERAND (arg0, 0);
11874 /* See if more bits can be proven as zero because of
11875 zero extension. */
11876 if (TREE_CODE (arg00) == NOP_EXPR
11877 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11878 {
11879 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11880 if (TYPE_PRECISION (inner_type)
11881 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11882 && TYPE_PRECISION (inner_type) < prec)
11883 {
11884 prec = TYPE_PRECISION (inner_type);
11885 /* See if we can shorten the right shift. */
11886 if (shiftc < prec)
11887 shift_type = inner_type;
11888 }
11889 }
11890 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11891 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11892 zerobits <<= prec - shiftc;
11893 /* For an arithmetic shift, if the sign bit could be set, zerobits
11894 can actually contain sign bits, so no transformation is
11895 possible, unless MASK masks them all away. In that
11896 case the shift needs to be converted into a logical shift. */
11897 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11898 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11899 {
11900 if ((mask & zerobits) == 0)
11901 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11902 else
11903 zerobits = 0;
11904 }
11905 }
11906
11907 /* ((X << 16) & 0xff00) is (X, 0). */
11908 if ((mask & zerobits) == mask)
11909 return omit_one_operand_loc (loc, type,
11910 build_int_cst (type, 0), arg0);
11911
11912 newmask = mask | zerobits;
11913 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11914 {
11915 /* Only do the transformation if NEWMASK is some integer
11916 mode's mask. */
11917 for (prec = BITS_PER_UNIT;
11918 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11919 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11920 break;
11921 if (prec < HOST_BITS_PER_WIDE_INT
11922 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11923 {
11924 tree newmaskt;
11925
11926 if (shift_type != TREE_TYPE (arg0))
11927 {
11928 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11929 fold_convert_loc (loc, shift_type,
11930 TREE_OPERAND (arg0, 0)),
11931 TREE_OPERAND (arg0, 1));
11932 tem = fold_convert_loc (loc, type, tem);
11933 }
11934 else
11935 tem = op0;
11936 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11937 if (!tree_int_cst_equal (newmaskt, arg1))
11938 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11939 }
11940 }
11941 }
11942
11943 goto associate;
11944
11945 case RDIV_EXPR:
11946 /* Don't touch a floating-point divide by zero unless the mode
11947 of the constant can represent infinity. */
11948 if (TREE_CODE (arg1) == REAL_CST
11949 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11950 && real_zerop (arg1))
11951 return NULL_TREE;
11952
11953 /* Optimize A / A to 1.0 if we don't care about
11954 NaNs or Infinities. Skip the transformation
11955 for non-real operands. */
11956 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11957 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11958 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11959 && operand_equal_p (arg0, arg1, 0))
11960 {
11961 tree r = build_real (TREE_TYPE (arg0), dconst1);
11962
11963 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11964 }
11965
11966 /* The complex version of the above A / A optimization. */
11967 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11968 && operand_equal_p (arg0, arg1, 0))
11969 {
11970 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11971 if (! HONOR_NANS (TYPE_MODE (elem_type))
11972 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11973 {
11974 tree r = build_real (elem_type, dconst1);
11975 /* omit_two_operands will call fold_convert for us. */
11976 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11977 }
11978 }
11979
11980 /* (-A) / (-B) -> A / B */
11981 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11982 return fold_build2_loc (loc, RDIV_EXPR, type,
11983 TREE_OPERAND (arg0, 0),
11984 negate_expr (arg1));
11985 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11986 return fold_build2_loc (loc, RDIV_EXPR, type,
11987 negate_expr (arg0),
11988 TREE_OPERAND (arg1, 0));
11989
11990 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11991 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11992 && real_onep (arg1))
11993 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11994
11995 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11996 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11997 && real_minus_onep (arg1))
11998 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11999 negate_expr (arg0)));
12000
12001 /* If ARG1 is a constant, we can convert this to a multiply by the
12002 reciprocal. This does not have the same rounding properties,
12003 so only do this if -freciprocal-math. We can actually
12004 always safely do it if ARG1 is a power of two, but it's hard to
12005 tell if it is or not in a portable manner. */
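/* Sketch of the two sub-cases (hypothetical operands):
     x / 5.0  -->  x * 0.2   only under -freciprocal-math, but
     x / 4.0  -->  x * 0.25  whenever optimizing, because the
   reciprocal of a power of two is exact.  */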
12006 if (optimize
12007 && (TREE_CODE (arg1) == REAL_CST
12008 || (TREE_CODE (arg1) == COMPLEX_CST
12009 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12010 || (TREE_CODE (arg1) == VECTOR_CST
12011 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12012 {
12013 if (flag_reciprocal_math
12014 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12015 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12016 /* Find the reciprocal if optimizing and the result is exact.
12017 TODO: Complex reciprocal not implemented. */
12018 if (TREE_CODE (arg1) != COMPLEX_CST)
12019 {
12020 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12021
12022 if (inverse)
12023 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12024 }
12025 }
12026 /* Convert A/B/C to A/(B*C). */
12027 if (flag_reciprocal_math
12028 && TREE_CODE (arg0) == RDIV_EXPR)
12029 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12030 fold_build2_loc (loc, MULT_EXPR, type,
12031 TREE_OPERAND (arg0, 1), arg1));
12032
12033 /* Convert A/(B/C) to (A/B)*C. */
12034 if (flag_reciprocal_math
12035 && TREE_CODE (arg1) == RDIV_EXPR)
12036 return fold_build2_loc (loc, MULT_EXPR, type,
12037 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12038 TREE_OPERAND (arg1, 0)),
12039 TREE_OPERAND (arg1, 1));
12040
12041 /* Convert C1/(X*C2) into (C1/C2)/X. */
12042 if (flag_reciprocal_math
12043 && TREE_CODE (arg1) == MULT_EXPR
12044 && TREE_CODE (arg0) == REAL_CST
12045 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12046 {
12047 tree tem = const_binop (RDIV_EXPR, arg0,
12048 TREE_OPERAND (arg1, 1));
12049 if (tem)
12050 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12051 TREE_OPERAND (arg1, 0));
12052 }
12053
12054 if (flag_unsafe_math_optimizations)
12055 {
12056 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12057 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12058
12059 /* Optimize sin(x)/cos(x) as tan(x). */
12060 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12061 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12062 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12063 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12064 CALL_EXPR_ARG (arg1, 0), 0))
12065 {
12066 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12067
12068 if (tanfn != NULL_TREE)
12069 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12070 }
12071
12072 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12073 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12074 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12075 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12076 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12077 CALL_EXPR_ARG (arg1, 0), 0))
12078 {
12079 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12080
12081 if (tanfn != NULL_TREE)
12082 {
12083 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12084 CALL_EXPR_ARG (arg0, 0));
12085 return fold_build2_loc (loc, RDIV_EXPR, type,
12086 build_real (type, dconst1), tmp);
12087 }
12088 }
12089
12090 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12091 NaNs or Infinities. */
12092 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12093 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12094 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12095 {
12096 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12097 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12098
12099 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12100 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12101 && operand_equal_p (arg00, arg01, 0))
12102 {
12103 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12104
12105 if (cosfn != NULL_TREE)
12106 return build_call_expr_loc (loc, cosfn, 1, arg00);
12107 }
12108 }
12109
12110 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12111 NaNs or Infinities. */
12112 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12113 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12114 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12115 {
12116 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12117 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12118
12119 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12120 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12121 && operand_equal_p (arg00, arg01, 0))
12122 {
12123 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12124
12125 if (cosfn != NULL_TREE)
12126 {
12127 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12128 return fold_build2_loc (loc, RDIV_EXPR, type,
12129 build_real (type, dconst1),
12130 tmp);
12131 }
12132 }
12133 }
12134
12135 /* Optimize pow(x,c)/x as pow(x,c-1). */
12136 if (fcode0 == BUILT_IN_POW
12137 || fcode0 == BUILT_IN_POWF
12138 || fcode0 == BUILT_IN_POWL)
12139 {
12140 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12141 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12142 if (TREE_CODE (arg01) == REAL_CST
12143 && !TREE_OVERFLOW (arg01)
12144 && operand_equal_p (arg1, arg00, 0))
12145 {
12146 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12147 REAL_VALUE_TYPE c;
12148 tree arg;
12149
12150 c = TREE_REAL_CST (arg01);
12151 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12152 arg = build_real (type, c);
12153 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12154 }
12155 }
12156
12157 /* Optimize a/root(b/c) into a*root(c/b). */
12158 if (BUILTIN_ROOT_P (fcode1))
12159 {
12160 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12161
12162 if (TREE_CODE (rootarg) == RDIV_EXPR)
12163 {
12164 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12165 tree b = TREE_OPERAND (rootarg, 0);
12166 tree c = TREE_OPERAND (rootarg, 1);
12167
12168 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12169
12170 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12171 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12172 }
12173 }
12174
12175 /* Optimize x/expN(y) into x*expN(-y). */
12176 if (BUILTIN_EXPONENT_P (fcode1))
12177 {
12178 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12179 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12180 arg1 = build_call_expr_loc (loc,
12181 expfn, 1,
12182 fold_convert_loc (loc, type, arg));
12183 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12184 }
12185
12186 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12187 if (fcode1 == BUILT_IN_POW
12188 || fcode1 == BUILT_IN_POWF
12189 || fcode1 == BUILT_IN_POWL)
12190 {
12191 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12192 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12193 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12194 tree neg11 = fold_convert_loc (loc, type,
12195 negate_expr (arg11));
12196 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12197 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12198 }
12199 }
12200 return NULL_TREE;
12201
12202 case TRUNC_DIV_EXPR:
12203 /* Optimize (X & (-A)) / A where A is a power of 2,
12204 to X >> log2(A). */
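/* Hypothetical instance: for signed x,
     (x & -8) / 8  -->  x >> 3
   which is exact because the mask makes the dividend a multiple
   of 8.  */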
12205 if (TREE_CODE (arg0) == BIT_AND_EXPR
12206 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12207 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12208 {
12209 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12210 arg1, TREE_OPERAND (arg0, 1));
12211 if (sum && integer_zerop (sum))
{
12212 tree pow2 = build_int_cst (integer_type_node,
12213 wi::exact_log2 (arg1));
12214 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12215 TREE_OPERAND (arg0, 0), pow2);
12216 }
12217 }
12218
12219 /* Fall through */
12220
12221 case FLOOR_DIV_EXPR:
12222 /* Simplify A / (B << N) where A and B are positive and B is
12223 a power of 2, to A >> (N + log2(B)). */
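/* Hypothetical instance: for unsigned a,
     a / (4 << n)  -->  a >> (n + 2)
   since log2 (4) == 2.  */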
12224 strict_overflow_p = false;
12225 if (TREE_CODE (arg1) == LSHIFT_EXPR
12226 && (TYPE_UNSIGNED (type)
12227 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12228 {
12229 tree sval = TREE_OPERAND (arg1, 0);
12230 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12231 {
12232 tree sh_cnt = TREE_OPERAND (arg1, 1);
12233 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12234 wi::exact_log2 (sval));
12235
12236 if (strict_overflow_p)
12237 fold_overflow_warning (("assuming signed overflow does not "
12238 "occur when simplifying A / (B << N)"),
12239 WARN_STRICT_OVERFLOW_MISC);
12240
12241 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12242 sh_cnt, pow2);
12243 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12244 fold_convert_loc (loc, type, arg0), sh_cnt);
12245 }
12246 }
12247
12248 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12249 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12250 if (INTEGRAL_TYPE_P (type)
12251 && TYPE_UNSIGNED (type)
12252 && code == FLOOR_DIV_EXPR)
12253 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12254
12255 /* Fall through */
12256
12257 case ROUND_DIV_EXPR:
12258 case CEIL_DIV_EXPR:
12259 case EXACT_DIV_EXPR:
12260 if (integer_onep (arg1))
12261 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12262 if (integer_zerop (arg1))
12263 return NULL_TREE;
12264 /* X / -1 is -X. */
12265 if (!TYPE_UNSIGNED (type)
12266 && TREE_CODE (arg1) == INTEGER_CST
12267 && wi::eq_p (arg1, -1))
12268 return fold_convert_loc (loc, type, negate_expr (arg0));
12269
12270 /* Convert -A / -B to A / B when the type is signed and overflow is
12271 undefined. */
12272 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12273 && TREE_CODE (arg0) == NEGATE_EXPR
12274 && negate_expr_p (arg1))
12275 {
12276 if (INTEGRAL_TYPE_P (type))
12277 fold_overflow_warning (("assuming signed overflow does not occur "
12278 "when distributing negation across "
12279 "division"),
12280 WARN_STRICT_OVERFLOW_MISC);
12281 return fold_build2_loc (loc, code, type,
12282 fold_convert_loc (loc, type,
12283 TREE_OPERAND (arg0, 0)),
12284 fold_convert_loc (loc, type,
12285 negate_expr (arg1)));
12286 }
12287 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12288 && TREE_CODE (arg1) == NEGATE_EXPR
12289 && negate_expr_p (arg0))
12290 {
12291 if (INTEGRAL_TYPE_P (type))
12292 fold_overflow_warning (("assuming signed overflow does not occur "
12293 "when distributing negation across "
12294 "division"),
12295 WARN_STRICT_OVERFLOW_MISC);
12296 return fold_build2_loc (loc, code, type,
12297 fold_convert_loc (loc, type,
12298 negate_expr (arg0)),
12299 fold_convert_loc (loc, type,
12300 TREE_OPERAND (arg1, 0)));
12301 }
12302
12303 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12304 operation, EXACT_DIV_EXPR.
12305
12306 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12307 At one time others generated faster code; it's not clear whether they
12308 still do after the last round of changes to the DIV code in expmed.c. */
12309 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12310 && multiple_of_p (type, arg0, arg1))
12311 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12312
12313 strict_overflow_p = false;
12314 if (TREE_CODE (arg1) == INTEGER_CST
12315 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12316 &strict_overflow_p)))
12317 {
12318 if (strict_overflow_p)
12319 fold_overflow_warning (("assuming signed overflow does not occur "
12320 "when simplifying division"),
12321 WARN_STRICT_OVERFLOW_MISC);
12322 return fold_convert_loc (loc, type, tem);
12323 }
12324
12325 return NULL_TREE;
12326
12327 case CEIL_MOD_EXPR:
12328 case FLOOR_MOD_EXPR:
12329 case ROUND_MOD_EXPR:
12330 case TRUNC_MOD_EXPR:
12331 /* X % 1 is always zero, but be sure to preserve any side
12332 effects in X. */
12333 if (integer_onep (arg1))
12334 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12335
12336 /* For X % 0, return X % 0 unchanged so that we can get the
12337 proper warnings and errors. */
12338 if (integer_zerop (arg1))
12339 return NULL_TREE;
12340
12341 /* 0 % X is always zero, but be sure to preserve any side
12342 effects in X. Place this after checking for X == 0. */
12343 if (integer_zerop (arg0))
12344 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12345
12346 /* X % -1 is zero. */
12347 if (!TYPE_UNSIGNED (type)
12348 && TREE_CODE (arg1) == INTEGER_CST
12349 && wi::eq_p (arg1, -1))
12350 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12351
12352 /* X % -C is the same as X % C. */
12353 if (code == TRUNC_MOD_EXPR
12354 && TYPE_SIGN (type) == SIGNED
12355 && TREE_CODE (arg1) == INTEGER_CST
12356 && !TREE_OVERFLOW (arg1)
12357 && wi::neg_p (arg1)
12358 && !TYPE_OVERFLOW_TRAPS (type)
12359 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12360 && !sign_bit_p (arg1, arg1))
12361 return fold_build2_loc (loc, code, type,
12362 fold_convert_loc (loc, type, arg0),
12363 fold_convert_loc (loc, type,
12364 negate_expr (arg1)));
12365
12366 /* X % -Y is the same as X % Y. */
12367 if (code == TRUNC_MOD_EXPR
12368 && !TYPE_UNSIGNED (type)
12369 && TREE_CODE (arg1) == NEGATE_EXPR
12370 && !TYPE_OVERFLOW_TRAPS (type))
12371 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12372 fold_convert_loc (loc, type,
12373 TREE_OPERAND (arg1, 0)));
12374
12375 strict_overflow_p = false;
12376 if (TREE_CODE (arg1) == INTEGER_CST
12377 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12378 &strict_overflow_p)))
12379 {
12380 if (strict_overflow_p)
12381 fold_overflow_warning (("assuming signed overflow does not occur "
12382 "when simplifying modulus"),
12383 WARN_STRICT_OVERFLOW_MISC);
12384 return fold_convert_loc (loc, type, tem);
12385 }
12386
12387 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12388 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
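/* Hypothetical instances: for unsigned x,
     x % 16        -->  x & 15
     x % (2 << n)  -->  x & ((2 << n) - 1)
   the latter via the LSHIFT_EXPR case handled below.  */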
12389 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12390 && (TYPE_UNSIGNED (type)
12391 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12392 {
12393 tree c = arg1;
12394 /* Also optimize A % (C << N) where C is a power of 2,
12395 to A & ((C << N) - 1). */
12396 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12397 c = TREE_OPERAND (arg1, 0);
12398
12399 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12400 {
12401 tree mask
12402 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12403 build_int_cst (TREE_TYPE (arg1), 1));
12404 if (strict_overflow_p)
12405 fold_overflow_warning (("assuming signed overflow does not "
12406 "occur when simplifying "
12407 "X % (power of two)"),
12408 WARN_STRICT_OVERFLOW_MISC);
12409 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12410 fold_convert_loc (loc, type, arg0),
12411 fold_convert_loc (loc, type, mask));
12412 }
12413 }
12414
12415 return NULL_TREE;
12416
12417 case LROTATE_EXPR:
12418 case RROTATE_EXPR:
12419 if (integer_all_onesp (arg0))
12420 return omit_one_operand_loc (loc, type, arg0, arg1);
12421 goto shift;
12422
12423 case RSHIFT_EXPR:
12424 /* Optimize -1 >> x for arithmetic right shifts. */
12425 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12426 && tree_expr_nonnegative_p (arg1))
12427 return omit_one_operand_loc (loc, type, arg0, arg1);
12428 /* ... fall through ... */
12429
12430 case LSHIFT_EXPR:
12431 shift:
12432 if (integer_zerop (arg1))
12433 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12434 if (integer_zerop (arg0))
12435 return omit_one_operand_loc (loc, type, arg0, arg1);
12436
12437 /* Prefer vector1 << scalar to vector1 << vector2
12438 if vector2 is uniform. */
12439 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12440 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12441 return fold_build2_loc (loc, code, type, op0, tem);
12442
12443 /* Since a negative shift count is not well-defined,
12444 don't try to compute it in the compiler. */
12445 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12446 return NULL_TREE;
12447
12448 prec = element_precision (type);
12449
12450 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12451 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12452 && tree_to_uhwi (arg1) < prec
12453 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12454 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12455 {
12456 HOST_WIDE_INT low = (tree_to_shwi (TREE_OPERAND (arg0, 1))
12457 + tree_to_shwi (arg1));
12458
12459 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12460 being well defined. */
12461 if (low >= prec)
12462 {
12463 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12464 low = low % prec;
12465 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12466 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12467 TREE_OPERAND (arg0, 0));
12468 else
12469 low = prec - 1;
12470 }
12471
12472 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12473 build_int_cst (TREE_TYPE (arg1), low));
12474 }
12475
12476 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12477 into x & ((unsigned)-1 >> c) for unsigned types. */
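/* Hypothetical instance: for a 32-bit unsigned x,
     (x >> 4) << 4  -->  x & 0xfffffff0
     (x << 4) >> 4  -->  x & 0x0fffffff
   i.e. the shift pair degenerates to masking off the bits that
   were shifted out.  */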
12478 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12479 || (TYPE_UNSIGNED (type)
12480 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12481 && tree_fits_shwi_p (arg1)
12482 && tree_to_shwi (arg1) < prec
12483 && tree_fits_shwi_p (TREE_OPERAND (arg0, 1))
12484 && tree_to_shwi (TREE_OPERAND (arg0, 1)) < prec)
12485 {
12486 HOST_WIDE_INT low0 = tree_to_shwi (TREE_OPERAND (arg0, 1));
12487 HOST_WIDE_INT low1 = tree_to_shwi (arg1);
12488 tree lshift;
12489 tree arg00;
12490
12491 if (low0 == low1)
12492 {
12493 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12494
12495 lshift = build_minus_one_cst (type);
12496 lshift = const_binop (code, lshift, arg1);
12497
12498 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12499 }
12500 }
12501
12502 /* Rewrite an LROTATE_EXPR by a constant into an
12503 RROTATE_EXPR by a new constant. */
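/* Hypothetical instance: on a 32-bit type, a rotate left by 8
   becomes a rotate right by 32 - 8 == 24.  */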
12504 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12505 {
12506 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12507 tem = const_binop (MINUS_EXPR, tem, arg1);
12508 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12509 }
12510
12511 /* If we have a rotate of a bit operation with the rotate count and
12512 the second operand of the bit operation both constant,
12513 permute the two operations. */
12514 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12515 && (TREE_CODE (arg0) == BIT_AND_EXPR
12516 || TREE_CODE (arg0) == BIT_IOR_EXPR
12517 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12518 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12519 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12520 fold_build2_loc (loc, code, type,
12521 TREE_OPERAND (arg0, 0), arg1),
12522 fold_build2_loc (loc, code, type,
12523 TREE_OPERAND (arg0, 1), arg1));
12524
12525 /* Two consecutive rotates adding up to some integer
12526 multiple of the precision of the type can be ignored. */
12527 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12528 && TREE_CODE (arg0) == RROTATE_EXPR
12529 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12530 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12531 prec) == 0)
12532 return TREE_OPERAND (arg0, 0);
12533
12534 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12535 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12536 if the latter can be further optimized. */
12537 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12538 && TREE_CODE (arg0) == BIT_AND_EXPR
12539 && TREE_CODE (arg1) == INTEGER_CST
12540 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12541 {
12542 tree mask = fold_build2_loc (loc, code, type,
12543 fold_convert_loc (loc, type,
12544 TREE_OPERAND (arg0, 1)),
12545 arg1);
12546 tree shift = fold_build2_loc (loc, code, type,
12547 fold_convert_loc (loc, type,
12548 TREE_OPERAND (arg0, 0)),
12549 arg1);
12550 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12551 if (tem)
12552 return tem;
12553 }
12554
12555 return NULL_TREE;
12556
12557 case MIN_EXPR:
12558 if (operand_equal_p (arg0, arg1, 0))
12559 return omit_one_operand_loc (loc, type, arg0, arg1);
12560 if (INTEGRAL_TYPE_P (type)
12561 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12562 return omit_one_operand_loc (loc, type, arg1, arg0);
12563 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12564 if (tem)
12565 return tem;
12566 goto associate;
12567
12568 case MAX_EXPR:
12569 if (operand_equal_p (arg0, arg1, 0))
12570 return omit_one_operand_loc (loc, type, arg0, arg1);
12571 if (INTEGRAL_TYPE_P (type)
12572 && TYPE_MAX_VALUE (type)
12573 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12574 return omit_one_operand_loc (loc, type, arg1, arg0);
12575 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12576 if (tem)
12577 return tem;
12578 goto associate;
12579
12580 case TRUTH_ANDIF_EXPR:
12581 /* Note that the operands of this must be ints
12582 and their values must be 0 or 1.
12583 ("true" is a fixed value perhaps depending on the language.) */
12584 /* If first arg is constant zero, return it. */
12585 if (integer_zerop (arg0))
12586 return fold_convert_loc (loc, type, arg0);
12587 case TRUTH_AND_EXPR:
12588 /* If either arg is constant true, drop it. */
12589 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12590 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12591 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12592 /* Preserve sequence points. */
12593 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12594 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12595 /* If second arg is constant zero, result is zero, but first arg
12596 must be evaluated. */
12597 if (integer_zerop (arg1))
12598 return omit_one_operand_loc (loc, type, arg1, arg0);
12599 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12600 case will be handled here. */
12601 if (integer_zerop (arg0))
12602 return omit_one_operand_loc (loc, type, arg0, arg1);
12603
12604 /* !X && X is always false. */
12605 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12606 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12607 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12608 /* X && !X is always false. */
12609 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12610 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12611 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12612
12613 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12614 means A >= Y && A != MAX, but in this case we know that
12615 A < X <= MAX. */
12616
12617 if (!TREE_SIDE_EFFECTS (arg0)
12618 && !TREE_SIDE_EFFECTS (arg1))
12619 {
12620 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12621 if (tem && !operand_equal_p (tem, arg0, 0))
12622 return fold_build2_loc (loc, code, type, tem, arg1);
12623
12624 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12625 if (tem && !operand_equal_p (tem, arg1, 0))
12626 return fold_build2_loc (loc, code, type, arg0, tem);
12627 }
12628
12629 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12630 != NULL_TREE)
12631 return tem;
12632
12633 return NULL_TREE;
12634
12635 case TRUTH_ORIF_EXPR:
12636 /* Note that the operands of this must be ints
12637 and their values must be 0 or true.
12638 ("true" is a fixed value perhaps depending on the language.) */
12639 /* If first arg is constant true, return it. */
12640 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12641 return fold_convert_loc (loc, type, arg0);
12642 case TRUTH_OR_EXPR:
12643 /* If either arg is constant zero, drop it. */
12644 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12645 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12646 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12647 /* Preserve sequence points. */
12648 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12649 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12650 /* If second arg is constant true, result is true, but we must
12651 evaluate first arg. */
12652 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12653 return omit_one_operand_loc (loc, type, arg1, arg0);
12654 /* Likewise for first arg, but note this only occurs here for
12655 TRUTH_OR_EXPR. */
12656 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12657 return omit_one_operand_loc (loc, type, arg0, arg1);
12658
12659 /* !X || X is always true. */
12660 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12661 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12662 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12663 /* X || !X is always true. */
12664 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12665 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12666 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12667
12668 /* (X && !Y) || (!X && Y) is X ^ Y */
12669 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12670 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12671 {
12672 tree a0, a1, l0, l1, n0, n1;
12673
12674 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12675 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12676
12677 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12678 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12679
12680 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12681 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12682
12683 if ((operand_equal_p (n0, a0, 0)
12684 && operand_equal_p (n1, a1, 0))
12685 || (operand_equal_p (n0, a1, 0)
12686 && operand_equal_p (n1, a0, 0)))
12687 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12688 }
12689
12690 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12691 != NULL_TREE)
12692 return tem;
12693
12694 return NULL_TREE;
12695
12696 case TRUTH_XOR_EXPR:
12697 /* If the second arg is constant zero, drop it. */
12698 if (integer_zerop (arg1))
12699 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12700 /* If the second arg is constant true, this is a logical inversion. */
12701 if (integer_onep (arg1))
12702 {
12703 tem = invert_truthvalue_loc (loc, arg0);
12704 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12705 }
12706 /* Identical arguments cancel to zero. */
12707 if (operand_equal_p (arg0, arg1, 0))
12708 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12709
12710 /* !X ^ X is always true. */
12711 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12712 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12713 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12714
12715 /* X ^ !X is always true. */
12716 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12717 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12718 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12719
12720 return NULL_TREE;
12721
12722 case EQ_EXPR:
12723 case NE_EXPR:
12724 STRIP_NOPS (arg0);
12725 STRIP_NOPS (arg1);
12726
12727 tem = fold_comparison (loc, code, type, op0, op1);
12728 if (tem != NULL_TREE)
12729 return tem;
12730
12731 /* bool_var != 0 becomes bool_var. */
12732 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12733 && code == NE_EXPR)
12734 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12735
12736 /* bool_var == 1 becomes bool_var. */
12737 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12738 && code == EQ_EXPR)
12739 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12740
12741 /* bool_var != 1 becomes !bool_var. */
12742 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12743 && code == NE_EXPR)
12744 return fold_convert_loc (loc, type,
12745 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12746 TREE_TYPE (arg0), arg0));
12747
12748 /* bool_var == 0 becomes !bool_var. */
12749 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12750 && code == EQ_EXPR)
12751 return fold_convert_loc (loc, type,
12752 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12753 TREE_TYPE (arg0), arg0));
12754
12755 /* !exp != 0 becomes !exp */
12756 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12757 && code == NE_EXPR)
12758 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12759
12760 /* If this is an equality comparison of the address of two non-weak,
12761 unaliased symbols, neither of which is extern (since we do not
12762 have access to attributes for externs), then we know the result. */
12763 if (TREE_CODE (arg0) == ADDR_EXPR
12764 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12765 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12766 && ! lookup_attribute ("alias",
12767 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12768 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12769 && TREE_CODE (arg1) == ADDR_EXPR
12770 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12771 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12772 && ! lookup_attribute ("alias",
12773 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12774 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12775 {
12776 /* We know that we're looking at the address of two
12777 non-weak, unaliased, static _DECL nodes.
12778
12779 It is both wasteful and incorrect to call operand_equal_p
12780 to compare the two ADDR_EXPR nodes. It is wasteful in that
12781 all we need to do is test pointer equality for the arguments
12782 to the two ADDR_EXPR nodes. It is incorrect to use
12783 operand_equal_p as that function is NOT equivalent to a
12784 C equality test. It can in fact return false for two
12785 objects which would test as equal using the C equality
12786 operator. */
12787 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12788 return constant_boolean_node (equal
12789 ? code == EQ_EXPR : code != EQ_EXPR,
12790 type);
12791 }
12792
12793 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12794 a MINUS_EXPR of a constant, we can convert it into a comparison with
12795 a revised constant as long as no overflow occurs. */
12796 if (TREE_CODE (arg1) == INTEGER_CST
12797 && (TREE_CODE (arg0) == PLUS_EXPR
12798 || TREE_CODE (arg0) == MINUS_EXPR)
12799 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12800 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12801 ? MINUS_EXPR : PLUS_EXPR,
12802 fold_convert_loc (loc, TREE_TYPE (arg0),
12803 arg1),
12804 TREE_OPERAND (arg0, 1)))
12805 && !TREE_OVERFLOW (tem))
12806 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12807
12808 /* Similarly for a NEGATE_EXPR. */
12809 if (TREE_CODE (arg0) == NEGATE_EXPR
12810 && TREE_CODE (arg1) == INTEGER_CST
12811 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12812 arg1)))
12813 && TREE_CODE (tem) == INTEGER_CST
12814 && !TREE_OVERFLOW (tem))
12815 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12816
12817 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12818 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12819 && TREE_CODE (arg1) == INTEGER_CST
12820 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12821 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12822 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12823 fold_convert_loc (loc,
12824 TREE_TYPE (arg0),
12825 arg1),
12826 TREE_OPERAND (arg0, 1)));
12827
12828 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12829 if ((TREE_CODE (arg0) == PLUS_EXPR
12830 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12831 || TREE_CODE (arg0) == MINUS_EXPR)
12832 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12833 0)),
12834 arg1, 0)
12835 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12836 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12837 {
12838 tree val = TREE_OPERAND (arg0, 1);
12839 return omit_two_operands_loc (loc, type,
12840 fold_build2_loc (loc, code, type,
12841 val,
12842 build_int_cst (TREE_TYPE (val),
12843 0)),
12844 TREE_OPERAND (arg0, 0), arg1);
12845 }
12846
12847 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12848 if (TREE_CODE (arg0) == MINUS_EXPR
12849 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12850 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12851 1)),
12852 arg1, 0)
12853 && wi::bit_and (TREE_OPERAND (arg0, 0), 1) == 1)
12854 {
12855 return omit_two_operands_loc (loc, type,
12856 code == NE_EXPR
12857 ? boolean_true_node : boolean_false_node,
12858 TREE_OPERAND (arg0, 1), arg1);
12859 }
12860
12861 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12862 for !=. Don't do this for ordered comparisons due to overflow. */
12863 if (TREE_CODE (arg0) == MINUS_EXPR
12864 && integer_zerop (arg1))
12865 return fold_build2_loc (loc, code, type,
12866 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12867
12868 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12869 if (TREE_CODE (arg0) == ABS_EXPR
12870 && (integer_zerop (arg1) || real_zerop (arg1)))
12871 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12872
12873 /* If this is an EQ or NE comparison with zero and ARG0 is
12874 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12875 two operations, but the latter can be done in one less insn
12876 on machines that have only two-operand insns or on which a
12877 constant cannot be the first operand. */
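/* Hypothetical instance:
     ((1 << n) & x) == 0  -->  ((x >> n) & 1) == 0
   trading a variable shift of the constant 1 for a shift of the
   variable operand.  */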
12878 if (TREE_CODE (arg0) == BIT_AND_EXPR
12879 && integer_zerop (arg1))
12880 {
12881 tree arg00 = TREE_OPERAND (arg0, 0);
12882 tree arg01 = TREE_OPERAND (arg0, 1);
12883 if (TREE_CODE (arg00) == LSHIFT_EXPR
12884 && integer_onep (TREE_OPERAND (arg00, 0)))
12885 {
12886 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12887 arg01, TREE_OPERAND (arg00, 1));
12888 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12889 build_int_cst (TREE_TYPE (arg0), 1));
12890 return fold_build2_loc (loc, code, type,
12891 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12892 arg1);
12893 }
12894 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12895 && integer_onep (TREE_OPERAND (arg01, 0)))
12896 {
12897 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12898 arg00, TREE_OPERAND (arg01, 1));
12899 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12900 build_int_cst (TREE_TYPE (arg0), 1));
12901 return fold_build2_loc (loc, code, type,
12902 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12903 arg1);
12904 }
12905 }
12906
12907 /* If this is an NE or EQ comparison of zero against the result of a
12908 signed MOD operation whose second operand is a power of 2, make
12909 the MOD operation unsigned since it is simpler and equivalent. */
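/* Hypothetical instance: for signed x,
     x % 4 == 0  -->  (unsigned) x % 4U == 0
   the remainder is zero for exactly the same inputs in both
   forms, and the unsigned form folds to a simple mask test.  */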
12910 if (integer_zerop (arg1)
12911 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12912 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12913 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12914 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12915 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12916 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12917 {
12918 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12919 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12920 fold_convert_loc (loc, newtype,
12921 TREE_OPERAND (arg0, 0)),
12922 fold_convert_loc (loc, newtype,
12923 TREE_OPERAND (arg0, 1)));
12924
12925 return fold_build2_loc (loc, code, type, newmod,
12926 fold_convert_loc (loc, newtype, arg1));
12927 }
12928
12929 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12930 C1 is a valid shift constant, and C2 is a power of two, i.e.
12931 a single bit. */
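/* Hypothetical instance, assuming C2 << C1 does not overflow:
     ((x >> 3) & 4) != 0  -->  (x & 32) != 0
   since 4 << 3 == 32.  */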
12932 if (TREE_CODE (arg0) == BIT_AND_EXPR
12933 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12934 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12935 == INTEGER_CST
12936 && integer_pow2p (TREE_OPERAND (arg0, 1))
12937 && integer_zerop (arg1))
12938 {
12939 tree itype = TREE_TYPE (arg0);
12940 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12941 prec = TYPE_PRECISION (itype);
12942
12943 /* Check for a valid shift count. */
12944 if (wi::ltu_p (arg001, prec))
12945 {
12946 tree arg01 = TREE_OPERAND (arg0, 1);
12947 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12948 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12949 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12950 can be rewritten as (X & (C2 << C1)) != 0. */
12951 if ((log2 + tree_to_uhwi (arg001)) < prec)
12952 {
12953 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12954 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12955 return fold_build2_loc (loc, code, type, tem,
12956 fold_convert_loc (loc, itype, arg1));
12957 }
12958 /* Otherwise, for signed (arithmetic) shifts,
12959 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12960 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12961 else if (!TYPE_UNSIGNED (itype))
12962 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12963 arg000, build_int_cst (itype, 0));
12964 /* Otherwise, for unsigned (logical) shifts,
12965 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12966 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12967 else
12968 return omit_one_operand_loc (loc, type,
12969 code == EQ_EXPR ? integer_one_node
12970 : integer_zero_node,
12971 arg000);
12972 }
12973 }
12974
12975 /* If we have (A & C) == C where C is a power of 2, convert this into
12976 (A & C) != 0. Similarly for NE_EXPR. */
12977 if (TREE_CODE (arg0) == BIT_AND_EXPR
12978 && integer_pow2p (TREE_OPERAND (arg0, 1))
12979 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12980 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12981 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12982 integer_zero_node));
12983
12984 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12985 bit, then fold the expression into A < 0 or A >= 0. */
12986 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12987 if (tem)
12988 return tem;
12989
12990 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12991 Similarly for NE_EXPR. */
12992 if (TREE_CODE (arg0) == BIT_AND_EXPR
12993 && TREE_CODE (arg1) == INTEGER_CST
12994 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12995 {
12996 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12997 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12998 TREE_OPERAND (arg0, 1));
12999 tree dandnotc
13000 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13001 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13002 notc);
13003 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13004 if (integer_nonzerop (dandnotc))
13005 return omit_one_operand_loc (loc, type, rslt, arg0);
13006 }
13007
13008 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13009 Similarly for NE_EXPR. */
13010 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13011 && TREE_CODE (arg1) == INTEGER_CST
13012 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13013 {
13014 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13015 tree candnotd
13016 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13017 TREE_OPERAND (arg0, 1),
13018 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13019 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13020 if (integer_nonzerop (candnotd))
13021 return omit_one_operand_loc (loc, type, rslt, arg0);
13022 }
13023
13024 /* If this is a comparison of a field, we may be able to simplify it. */
13025 if ((TREE_CODE (arg0) == COMPONENT_REF
13026 || TREE_CODE (arg0) == BIT_FIELD_REF)
13027 /* Handle the constant case even without -O
13028 to make sure the warnings are given. */
13029 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13030 {
13031 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13032 if (t1)
13033 return t1;
13034 }
13035
13036 /* Optimize comparisons of strlen vs zero to a compare of the
13037 first character of the string vs zero. To wit,
13038 strlen(ptr) == 0 => *ptr == 0
13039 strlen(ptr) != 0 => *ptr != 0
13040 Other cases should reduce to one of these two (or a constant)
13041 due to the return value of strlen being unsigned. */
13042 if (TREE_CODE (arg0) == CALL_EXPR
13043 && integer_zerop (arg1))
13044 {
13045 tree fndecl = get_callee_fndecl (arg0);
13046
13047 if (fndecl
13048 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13049 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13050 && call_expr_nargs (arg0) == 1
13051 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13052 {
13053 tree iref = build_fold_indirect_ref_loc (loc,
13054 CALL_EXPR_ARG (arg0, 0));
13055 return fold_build2_loc (loc, code, type, iref,
13056 build_int_cst (TREE_TYPE (iref), 0));
13057 }
13058 }
13059
13060 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13061 of X. Similarly fold (X >> C) == 0 into X >= 0. */
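/* Hypothetical instance: for a 32-bit signed x,
     (x >> 31) != 0  -->  x < 0
     (x >> 31) == 0  -->  x >= 0.  */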
13062 if (TREE_CODE (arg0) == RSHIFT_EXPR
13063 && integer_zerop (arg1)
13064 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13065 {
13066 tree arg00 = TREE_OPERAND (arg0, 0);
13067 tree arg01 = TREE_OPERAND (arg0, 1);
13068 tree itype = TREE_TYPE (arg00);
13069 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
13070 {
13071 if (TYPE_UNSIGNED (itype))
13072 {
13073 itype = signed_type_for (itype);
13074 arg00 = fold_convert_loc (loc, itype, arg00);
13075 }
13076 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13077 type, arg00, build_zero_cst (itype));
13078 }
13079 }
13080
13081 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13082 if (integer_zerop (arg1)
13083 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13084 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13085 TREE_OPERAND (arg0, 1));
13086
13087 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13088 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13089 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13090 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13091 build_zero_cst (TREE_TYPE (arg0)));
13092 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13093 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13094 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13095 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13096 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13097 build_zero_cst (TREE_TYPE (arg0)));
13098
13099 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
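/* Hypothetical instance: (x ^ 5) == 3  -->  x == 6,
   since 5 ^ 3 == 6.  */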
13100 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13101 && TREE_CODE (arg1) == INTEGER_CST
13102 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13103 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13104 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13105 TREE_OPERAND (arg0, 1), arg1));
13106
13107 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13108 (X & C) == 0 when C is a single bit. */
13109 if (TREE_CODE (arg0) == BIT_AND_EXPR
13110 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13111 && integer_zerop (arg1)
13112 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13113 {
13114 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13115 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13116 TREE_OPERAND (arg0, 1));
13117 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13118 type, tem,
13119 fold_convert_loc (loc, TREE_TYPE (arg0),
13120 arg1));
13121 }
13122
13123 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13124 constant C is a power of two, i.e. a single bit. */
13125 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13126 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13127 && integer_zerop (arg1)
13128 && integer_pow2p (TREE_OPERAND (arg0, 1))
13129 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13130 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13131 {
13132 tree arg00 = TREE_OPERAND (arg0, 0);
13133 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13134 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13135 }
13136
13137 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13138 when C is a power of two, i.e. a single bit. */
13139 if (TREE_CODE (arg0) == BIT_AND_EXPR
13140 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13141 && integer_zerop (arg1)
13142 && integer_pow2p (TREE_OPERAND (arg0, 1))
13143 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13144 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13145 {
13146 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13147 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13148 arg000, TREE_OPERAND (arg0, 1));
13149 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13150 tem, build_int_cst (TREE_TYPE (tem), 0));
13151 }
13152
13153 if (integer_zerop (arg1)
13154 && tree_expr_nonzero_p (arg0))
13155 {
13156 tree res = constant_boolean_node (code == NE_EXPR, type);
13157 return omit_one_operand_loc (loc, type, res, arg0);
13158 }
13159
13160 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13161 if (TREE_CODE (arg0) == NEGATE_EXPR
13162 && TREE_CODE (arg1) == NEGATE_EXPR)
13163 return fold_build2_loc (loc, code, type,
13164 TREE_OPERAND (arg0, 0),
13165 fold_convert_loc (loc, TREE_TYPE (arg0),
13166 TREE_OPERAND (arg1, 0)));
13167
13168 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13169 if (TREE_CODE (arg0) == BIT_AND_EXPR
13170 && TREE_CODE (arg1) == BIT_AND_EXPR)
13171 {
13172 tree arg00 = TREE_OPERAND (arg0, 0);
13173 tree arg01 = TREE_OPERAND (arg0, 1);
13174 tree arg10 = TREE_OPERAND (arg1, 0);
13175 tree arg11 = TREE_OPERAND (arg1, 1);
13176 tree itype = TREE_TYPE (arg0);
13177
13178 if (operand_equal_p (arg01, arg11, 0))
13179 return fold_build2_loc (loc, code, type,
13180 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13181 fold_build2_loc (loc,
13182 BIT_XOR_EXPR, itype,
13183 arg00, arg10),
13184 arg01),
13185 build_zero_cst (itype));
13186
13187 if (operand_equal_p (arg01, arg10, 0))
13188 return fold_build2_loc (loc, code, type,
13189 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13190 fold_build2_loc (loc,
13191 BIT_XOR_EXPR, itype,
13192 arg00, arg11),
13193 arg01),
13194 build_zero_cst (itype));
13195
13196 if (operand_equal_p (arg00, arg11, 0))
13197 return fold_build2_loc (loc, code, type,
13198 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13199 fold_build2_loc (loc,
13200 BIT_XOR_EXPR, itype,
13201 arg01, arg10),
13202 arg00),
13203 build_zero_cst (itype));
13204
13205 if (operand_equal_p (arg00, arg10, 0))
13206 return fold_build2_loc (loc, code, type,
13207 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13208 fold_build2_loc (loc,
13209 BIT_XOR_EXPR, itype,
13210 arg01, arg11),
13211 arg00),
13212 build_zero_cst (itype));
13213 }
13214
13215 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13216 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13217 {
13218 tree arg00 = TREE_OPERAND (arg0, 0);
13219 tree arg01 = TREE_OPERAND (arg0, 1);
13220 tree arg10 = TREE_OPERAND (arg1, 0);
13221 tree arg11 = TREE_OPERAND (arg1, 1);
13222 tree itype = TREE_TYPE (arg0);
13223
13224 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13225 operand_equal_p guarantees no side-effects so we don't need
13226 to use omit_one_operand on Z. */
13227 if (operand_equal_p (arg01, arg11, 0))
13228 return fold_build2_loc (loc, code, type, arg00,
13229 fold_convert_loc (loc, TREE_TYPE (arg00),
13230 arg10));
13231 if (operand_equal_p (arg01, arg10, 0))
13232 return fold_build2_loc (loc, code, type, arg00,
13233 fold_convert_loc (loc, TREE_TYPE (arg00),
13234 arg11));
13235 if (operand_equal_p (arg00, arg11, 0))
13236 return fold_build2_loc (loc, code, type, arg01,
13237 fold_convert_loc (loc, TREE_TYPE (arg01),
13238 arg10));
13239 if (operand_equal_p (arg00, arg10, 0))
13240 return fold_build2_loc (loc, code, type, arg01,
13241 fold_convert_loc (loc, TREE_TYPE (arg01),
13242 arg11));
13243
13244 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13245 if (TREE_CODE (arg01) == INTEGER_CST
13246 && TREE_CODE (arg11) == INTEGER_CST)
13247 {
13248 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13249 fold_convert_loc (loc, itype, arg11));
13250 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13251 return fold_build2_loc (loc, code, type, tem,
13252 fold_convert_loc (loc, itype, arg10));
13253 }
13254 }
13255
13256 /* Attempt to simplify equality/inequality comparisons of complex
13257 values. Only lower the comparison if the result is known or
13258 can be simplified to a single scalar comparison. */
13259 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13260 || TREE_CODE (arg0) == COMPLEX_CST)
13261 && (TREE_CODE (arg1) == COMPLEX_EXPR
13262 || TREE_CODE (arg1) == COMPLEX_CST))
13263 {
13264 tree real0, imag0, real1, imag1;
13265 tree rcond, icond;
13266
13267 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13268 {
13269 real0 = TREE_OPERAND (arg0, 0);
13270 imag0 = TREE_OPERAND (arg0, 1);
13271 }
13272 else
13273 {
13274 real0 = TREE_REALPART (arg0);
13275 imag0 = TREE_IMAGPART (arg0);
13276 }
13277
13278 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13279 {
13280 real1 = TREE_OPERAND (arg1, 0);
13281 imag1 = TREE_OPERAND (arg1, 1);
13282 }
13283 else
13284 {
13285 real1 = TREE_REALPART (arg1);
13286 imag1 = TREE_IMAGPART (arg1);
13287 }
13288
13289 rcond = fold_binary_loc (loc, code, type, real0, real1);
13290 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13291 {
13292 if (integer_zerop (rcond))
13293 {
13294 if (code == EQ_EXPR)
13295 return omit_two_operands_loc (loc, type, boolean_false_node,
13296 imag0, imag1);
13297 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13298 }
13299 else
13300 {
13301 if (code == NE_EXPR)
13302 return omit_two_operands_loc (loc, type, boolean_true_node,
13303 imag0, imag1);
13304 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13305 }
13306 }
13307
13308 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13309 if (icond && TREE_CODE (icond) == INTEGER_CST)
13310 {
13311 if (integer_zerop (icond))
13312 {
13313 if (code == EQ_EXPR)
13314 return omit_two_operands_loc (loc, type, boolean_false_node,
13315 real0, real1);
13316 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13317 }
13318 else
13319 {
13320 if (code == NE_EXPR)
13321 return omit_two_operands_loc (loc, type, boolean_true_node,
13322 real0, real1);
13323 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13324 }
13325 }
13326 }
13327
13328 return NULL_TREE;
13329
13330 case LT_EXPR:
13331 case GT_EXPR:
13332 case LE_EXPR:
13333 case GE_EXPR:
13334 tem = fold_comparison (loc, code, type, op0, op1);
13335 if (tem != NULL_TREE)
13336 return tem;
13337
13338 /* Transform comparisons of the form X +- C CMP X. */
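/* For example, for signed x with undefined overflow, x + 1 > x
   folds to true and x - 1 > x folds to false; fold_overflow_warning
   records the strict-overflow assumption in each case.  */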
13339 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13340 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13341 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13342 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13343 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13344 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13345 {
13346 tree arg01 = TREE_OPERAND (arg0, 1);
13347 enum tree_code code0 = TREE_CODE (arg0);
13348 int is_positive;
13349
13350 if (TREE_CODE (arg01) == REAL_CST)
13351 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13352 else
13353 is_positive = tree_int_cst_sgn (arg01);
13354
13355 /* (X - c) > X becomes false. */
13356 if (code == GT_EXPR
13357 && ((code0 == MINUS_EXPR && is_positive >= 0)
13358 || (code0 == PLUS_EXPR && is_positive <= 0)))
13359 {
13360 if (TREE_CODE (arg01) == INTEGER_CST
13361 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13362 fold_overflow_warning (("assuming signed overflow does not "
13363 "occur when assuming that (X - c) > X "
13364 "is always false"),
13365 WARN_STRICT_OVERFLOW_ALL);
13366 return constant_boolean_node (0, type);
13367 }
13368
13369 /* Likewise (X + c) < X becomes false. */
13370 if (code == LT_EXPR
13371 && ((code0 == PLUS_EXPR && is_positive >= 0)
13372 || (code0 == MINUS_EXPR && is_positive <= 0)))
13373 {
13374 if (TREE_CODE (arg01) == INTEGER_CST
13375 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13376 fold_overflow_warning (("assuming signed overflow does not "
13377 "occur when assuming that "
13378 "(X + c) < X is always false"),
13379 WARN_STRICT_OVERFLOW_ALL);
13380 return constant_boolean_node (0, type);
13381 }
13382
13383 /* Convert (X - c) <= X to true. */
13384 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13385 && code == LE_EXPR
13386 && ((code0 == MINUS_EXPR && is_positive >= 0)
13387 || (code0 == PLUS_EXPR && is_positive <= 0)))
13388 {
13389 if (TREE_CODE (arg01) == INTEGER_CST
13390 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13391 fold_overflow_warning (("assuming signed overflow does not "
13392 "occur when assuming that "
13393 "(X - c) <= X is always true"),
13394 WARN_STRICT_OVERFLOW_ALL);
13395 return constant_boolean_node (1, type);
13396 }
13397
13398 /* Convert (X + c) >= X to true. */
13399 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13400 && code == GE_EXPR
13401 && ((code0 == PLUS_EXPR && is_positive >= 0)
13402 || (code0 == MINUS_EXPR && is_positive <= 0)))
13403 {
13404 if (TREE_CODE (arg01) == INTEGER_CST
13405 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13406 fold_overflow_warning (("assuming signed overflow does not "
13407 "occur when assuming that "
13408 "(X + c) >= X is always true"),
13409 WARN_STRICT_OVERFLOW_ALL);
13410 return constant_boolean_node (1, type);
13411 }
13412
13413 if (TREE_CODE (arg01) == INTEGER_CST)
13414 {
13415 /* Convert X + c > X and X - c < X to true for integers. */
13416 if (code == GT_EXPR
13417 && ((code0 == PLUS_EXPR && is_positive > 0)
13418 || (code0 == MINUS_EXPR && is_positive < 0)))
13419 {
13420 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13421 fold_overflow_warning (("assuming signed overflow does "
13422 "not occur when assuming that "
13423 "(X + c) > X is always true"),
13424 WARN_STRICT_OVERFLOW_ALL);
13425 return constant_boolean_node (1, type);
13426 }
13427
13428 if (code == LT_EXPR
13429 && ((code0 == MINUS_EXPR && is_positive > 0)
13430 || (code0 == PLUS_EXPR && is_positive < 0)))
13431 {
13432 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13433 fold_overflow_warning (("assuming signed overflow does "
13434 "not occur when assuming that "
13435 "(X - c) < X is always true"),
13436 WARN_STRICT_OVERFLOW_ALL);
13437 return constant_boolean_node (1, type);
13438 }
13439
13440 /* Convert X + c <= X and X - c >= X to false for integers. */
13441 if (code == LE_EXPR
13442 && ((code0 == PLUS_EXPR && is_positive > 0)
13443 || (code0 == MINUS_EXPR && is_positive < 0)))
13444 {
13445 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13446 fold_overflow_warning (("assuming signed overflow does "
13447 "not occur when assuming that "
13448 "(X + c) <= X is always false"),
13449 WARN_STRICT_OVERFLOW_ALL);
13450 return constant_boolean_node (0, type);
13451 }
13452
13453 if (code == GE_EXPR
13454 && ((code0 == MINUS_EXPR && is_positive > 0)
13455 || (code0 == PLUS_EXPR && is_positive < 0)))
13456 {
13457 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13458 fold_overflow_warning (("assuming signed overflow does "
13459 "not occur when assuming that "
13460 "(X - c) >= X is always false"),
13461 WARN_STRICT_OVERFLOW_ALL);
13462 return constant_boolean_node (0, type);
13463 }
13464 }
13465 }
13466
13467 /* Comparisons with the highest or lowest possible integer of
13468 the specified precision will have known values. */
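/* For example, if x has type unsigned char, x > 255 folds to false,
   x <= 255 folds to true, and x > 254 becomes x == 255.  */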
13469 {
13470 tree arg1_type = TREE_TYPE (arg1);
13471 unsigned int prec = TYPE_PRECISION (arg1_type);
13472
13473 if (TREE_CODE (arg1) == INTEGER_CST
13474 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13475 {
13476 wide_int max = wi::max_value (arg1_type);
13477 wide_int signed_max = wi::max_value (prec, SIGNED);
13478 wide_int min = wi::min_value (arg1_type);
13479 wide_int wi_arg1 = arg1;
13480
13481 if (wi_arg1 == max)
13482 switch (code)
13483 {
13484 case GT_EXPR:
13485 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13486
13487 case GE_EXPR:
13488 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13489
13490 case LE_EXPR:
13491 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13492
13493 case LT_EXPR:
13494 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13495
13496 /* The GE_EXPR and LT_EXPR cases above are not normally
13497 reached because of previous transformations. */
13498
13499 default:
13500 break;
13501 }
13502 else if (wi_arg1 == (max - 1))
13503 switch (code)
13504 {
13505 case GT_EXPR:
13506 arg1 = const_binop (PLUS_EXPR, arg1,
13507 build_int_cst (TREE_TYPE (arg1), 1));
13508 return fold_build2_loc (loc, EQ_EXPR, type,
13509 fold_convert_loc (loc,
13510 TREE_TYPE (arg1), arg0),
13511 arg1);
13512 case LE_EXPR:
13513 arg1 = const_binop (PLUS_EXPR, arg1,
13514 build_int_cst (TREE_TYPE (arg1), 1));
13515 return fold_build2_loc (loc, NE_EXPR, type,
13516 fold_convert_loc (loc, TREE_TYPE (arg1),
13517 arg0),
13518 arg1);
13519 default:
13520 break;
13521 }
13522 else if (wi_arg1 == min)
13523 switch (code)
13524 {
13525 case LT_EXPR:
13526 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13527
13528 case LE_EXPR:
13529 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13530
13531 case GE_EXPR:
13532 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13533
13534 case GT_EXPR:
13535 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13536
13537 default:
13538 break;
13539 }
13540 else if (wi_arg1 == (min + 1))
13541 switch (code)
13542 {
13543 case GE_EXPR:
13544 arg1 = const_binop (MINUS_EXPR, arg1,
13545 build_int_cst (TREE_TYPE (arg1), 1));
13546 return fold_build2_loc (loc, NE_EXPR, type,
13547 fold_convert_loc (loc,
13548 TREE_TYPE (arg1), arg0),
13549 arg1);
13550 case LT_EXPR:
13551 arg1 = const_binop (MINUS_EXPR, arg1,
13552 build_int_cst (TREE_TYPE (arg1), 1));
13553 return fold_build2_loc (loc, EQ_EXPR, type,
13554 fold_convert_loc (loc, TREE_TYPE (arg1),
13555 arg0),
13556 arg1);
13557 default:
13558 break;
13559 }
13560
13561 else if (wi_arg1 == signed_max
13562 && TYPE_UNSIGNED (arg1_type)
13563 /* ??? The check of GET_MODE_BITSIZE below is questionable;
13564 arguably the type's precision, rather than the mode bitsize,
13565 is what should be checked against PREC.  */
13566
13567 /* We will flip the signedness of the comparison operator
13568 associated with the mode of arg1, so the sign bit is
13569 specified by this mode. Check that arg1 is the signed
13570 max associated with this sign bit. */
13571 && prec == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13572 /* signed_type does not work on pointer types. */
13573 && INTEGRAL_TYPE_P (arg1_type))
13574 {
13575 /* The following case also applies to X < signed_max+1
13576 and X >= signed_max+1 because of previous transformations. */
13577 if (code == LE_EXPR || code == GT_EXPR)
13578 {
13579 tree st;
13580 st = signed_type_for (TREE_TYPE (arg1));
13581 return fold_build2_loc (loc,
13582 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13583 type, fold_convert_loc (loc, st, arg0),
13584 build_int_cst (st, 0));
13585 }
13586 }
13587 }
13588 }
13589
13590 /* If we are comparing an ABS_EXPR with a constant, we can
13591 convert all the cases into explicit comparisons, but they may
13592 well not be faster than doing the ABS and one comparison.
13593 But ABS (X) <= C is a range comparison, which becomes a subtraction
13594 and a comparison, and is probably faster. */
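/* For example, ABS (x) <= 7 folds to x >= -7 && x <= 7, a range
   test that later folding can often reduce to a single unsigned
   comparison.  */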
13595 if (code == LE_EXPR
13596 && TREE_CODE (arg1) == INTEGER_CST
13597 && TREE_CODE (arg0) == ABS_EXPR
13598 && ! TREE_SIDE_EFFECTS (arg0)
13599 && (0 != (tem = negate_expr (arg1)))
13600 && TREE_CODE (tem) == INTEGER_CST
13601 && !TREE_OVERFLOW (tem))
13602 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13603 build2 (GE_EXPR, type,
13604 TREE_OPERAND (arg0, 0), tem),
13605 build2 (LE_EXPR, type,
13606 TREE_OPERAND (arg0, 0), arg1));
13607
13608 /* Convert ABS_EXPR<x> >= 0 to true. */
13609 strict_overflow_p = false;
13610 if (code == GE_EXPR
13611 && (integer_zerop (arg1)
13612 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13613 && real_zerop (arg1)))
13614 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13615 {
13616 if (strict_overflow_p)
13617 fold_overflow_warning (("assuming signed overflow does not occur "
13618 "when simplifying comparison of "
13619 "absolute value and zero"),
13620 WARN_STRICT_OVERFLOW_CONDITIONAL);
13621 return omit_one_operand_loc (loc, type,
13622 constant_boolean_node (true, type),
13623 arg0);
13624 }
13625
13626 /* Convert ABS_EXPR<x> < 0 to false. */
13627 strict_overflow_p = false;
13628 if (code == LT_EXPR
13629 && (integer_zerop (arg1) || real_zerop (arg1))
13630 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13631 {
13632 if (strict_overflow_p)
13633 fold_overflow_warning (("assuming signed overflow does not occur "
13634 "when simplifying comparison of "
13635 "absolute value and zero"),
13636 WARN_STRICT_OVERFLOW_CONDITIONAL);
13637 return omit_one_operand_loc (loc, type,
13638 constant_boolean_node (false, type),
13639 arg0);
13640 }
13641
13642 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13643 and similarly for >= into !=. */
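/* For example, for unsigned x: x < (1 << y) folds to (x >> y) == 0
   and x >= (1 << y) folds to (x >> y) != 0.  */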
13644 if ((code == LT_EXPR || code == GE_EXPR)
13645 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13646 && TREE_CODE (arg1) == LSHIFT_EXPR
13647 && integer_onep (TREE_OPERAND (arg1, 0)))
13648 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13649 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13650 TREE_OPERAND (arg1, 1)),
13651 build_zero_cst (TREE_TYPE (arg0)));
13652
13653 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13654 otherwise Y might be >= # of bits in X's type and thus e.g.
13655 (unsigned char) (1 << Y) for Y == 15 might be 0.
13656 If the cast is widening, then 1 << Y should have unsigned type,
13657 otherwise if Y is the number of bits in the signed shift type minus 1,
13658 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13659 Y == 31 might be 0xffffffff80000000. */
13660 if ((code == LT_EXPR || code == GE_EXPR)
13661 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13662 && CONVERT_EXPR_P (arg1)
13663 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13664 && (TYPE_PRECISION (TREE_TYPE (arg1))
13665 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13666 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13667 || (TYPE_PRECISION (TREE_TYPE (arg1))
13668 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13669 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13670 {
13671 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13672 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13673 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13674 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13675 build_zero_cst (TREE_TYPE (arg0)));
13676 }
13677
13678 return NULL_TREE;
13679
13680 case UNORDERED_EXPR:
13681 case ORDERED_EXPR:
13682 case UNLT_EXPR:
13683 case UNLE_EXPR:
13684 case UNGT_EXPR:
13685 case UNGE_EXPR:
13686 case UNEQ_EXPR:
13687 case LTGT_EXPR:
13688 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13689 {
13690 t1 = fold_relational_const (code, type, arg0, arg1);
13691 if (t1 != NULL_TREE)
13692 return t1;
13693 }
13694
13695 /* If the first operand is NaN, the result is constant. */
13696 if (TREE_CODE (arg0) == REAL_CST
13697 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13698 && (code != LTGT_EXPR || ! flag_trapping_math))
13699 {
13700 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13701 ? integer_zero_node
13702 : integer_one_node;
13703 return omit_one_operand_loc (loc, type, t1, arg1);
13704 }
13705
13706 /* If the second operand is NaN, the result is constant. */
13707 if (TREE_CODE (arg1) == REAL_CST
13708 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13709 && (code != LTGT_EXPR || ! flag_trapping_math))
13710 {
13711 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13712 ? integer_zero_node
13713 : integer_one_node;
13714 return omit_one_operand_loc (loc, type, t1, arg0);
13715 }
13716
13717 /* Simplify unordered comparison of something with itself. */
13718 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13719 && operand_equal_p (arg0, arg1, 0))
13720 return constant_boolean_node (1, type);
13721
13722 if (code == LTGT_EXPR
13723 && !flag_trapping_math
13724 && operand_equal_p (arg0, arg1, 0))
13725 return constant_boolean_node (0, type);
13726
13727 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
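/* For example, with floats f and g, (double) f < (double) g folds
   to f < g, since widening float conversions are exact and cannot
   change the comparison result.  */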
13728 {
13729 tree targ0 = strip_float_extensions (arg0);
13730 tree targ1 = strip_float_extensions (arg1);
13731 tree newtype = TREE_TYPE (targ0);
13732
13733 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13734 newtype = TREE_TYPE (targ1);
13735
13736 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13737 return fold_build2_loc (loc, code, type,
13738 fold_convert_loc (loc, newtype, targ0),
13739 fold_convert_loc (loc, newtype, targ1));
13740 }
13741
13742 return NULL_TREE;
13743
13744 case COMPOUND_EXPR:
13745 /* When pedantic, a compound expression can be neither an lvalue
13746 nor an integer constant expression. */
13747 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13748 return NULL_TREE;
13749 /* Don't let (0, 0) be a null pointer constant. */
13750 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13751 : fold_convert_loc (loc, type, arg1);
13752 return pedantic_non_lvalue_loc (loc, tem);
13753
13754 case COMPLEX_EXPR:
13755 if ((TREE_CODE (arg0) == REAL_CST
13756 && TREE_CODE (arg1) == REAL_CST)
13757 || (TREE_CODE (arg0) == INTEGER_CST
13758 && TREE_CODE (arg1) == INTEGER_CST))
13759 return build_complex (type, arg0, arg1);
13760 if (TREE_CODE (arg0) == REALPART_EXPR
13761 && TREE_CODE (arg1) == IMAGPART_EXPR
13762 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13763 && operand_equal_p (TREE_OPERAND (arg0, 0),
13764 TREE_OPERAND (arg1, 0), 0))
13765 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13766 TREE_OPERAND (arg1, 0));
13767 return NULL_TREE;
13768
13769 case ASSERT_EXPR:
13770 /* An ASSERT_EXPR should never be passed to fold_binary. */
13771 gcc_unreachable ();
13772
13773 case VEC_PACK_TRUNC_EXPR:
13774 case VEC_PACK_FIX_TRUNC_EXPR:
13775 {
13776 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13777 tree *elts;
13778
13779 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13780 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13781 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13782 return NULL_TREE;
13783
13784 elts = XALLOCAVEC (tree, nelts);
13785 if (!vec_cst_ctor_to_array (arg0, elts)
13786 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13787 return NULL_TREE;
13788
13789 for (i = 0; i < nelts; i++)
13790 {
13791 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13792 ? NOP_EXPR : FIX_TRUNC_EXPR,
13793 TREE_TYPE (type), elts[i]);
13794 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13795 return NULL_TREE;
13796 }
13797
13798 return build_vector (type, elts);
13799 }
13800
13801 case VEC_WIDEN_MULT_LO_EXPR:
13802 case VEC_WIDEN_MULT_HI_EXPR:
13803 case VEC_WIDEN_MULT_EVEN_EXPR:
13804 case VEC_WIDEN_MULT_ODD_EXPR:
13805 {
13806 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13807 unsigned int out, ofs, scale;
13808 tree *elts;
13809
13810 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13811 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13812 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13813 return NULL_TREE;
13814
13815 elts = XALLOCAVEC (tree, nelts * 4);
13816 if (!vec_cst_ctor_to_array (arg0, elts)
13817 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13818 return NULL_TREE;
13819
13820 if (code == VEC_WIDEN_MULT_LO_EXPR)
13821 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13822 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13823 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13824 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13825 scale = 1, ofs = 0;
13826 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13827 scale = 1, ofs = 1;
13828
13829 for (out = 0; out < nelts; out++)
13830 {
13831 unsigned int in1 = (out << scale) + ofs;
13832 unsigned int in2 = in1 + nelts * 2;
13833 tree t1, t2;
13834
13835 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13836 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13837
13838 if (t1 == NULL_TREE || t2 == NULL_TREE)
13839 return NULL_TREE;
13840 elts[out] = const_binop (MULT_EXPR, t1, t2);
13841 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13842 return NULL_TREE;
13843 }
13844
13845 return build_vector (type, elts);
13846 }
13847
13848 default:
13849 return NULL_TREE;
13850 } /* switch (code) */
13851 }
13852
13853 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13854 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13855 of GOTO_EXPR. */
13856
13857 static tree
13858 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13859 {
13860 switch (TREE_CODE (*tp))
13861 {
13862 case LABEL_EXPR:
13863 return *tp;
13864
13865 case GOTO_EXPR:
13866 *walk_subtrees = 0;
13867
13868 /* ... fall through ... */
13869
13870 default:
13871 return NULL_TREE;
13872 }
13873 }
13874
13875 /* Return whether the sub-tree ST contains a label which is accessible from
13876 outside the sub-tree. */
13877
13878 static bool
13879 contains_label_p (tree st)
13880 {
13881 return
13882 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13883 }
13884
13885 /* Fold a ternary expression of code CODE and type TYPE with operands
13886 OP0, OP1, and OP2. Return the folded expression if folding is
13887 successful. Otherwise, return NULL_TREE. */
13888
13889 tree
13890 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13891 tree op0, tree op1, tree op2)
13892 {
13893 tree tem;
13894 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13895 enum tree_code_class kind = TREE_CODE_CLASS (code);
13896
13897 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13898 && TREE_CODE_LENGTH (code) == 3);
13899
13900 /* Strip any conversions that don't change the mode. This is safe
13901 for every expression, except for a comparison expression because
13902 its signedness is derived from its operands. So, in the latter
13903 case, only strip conversions that don't change the signedness.
13904
13905 Note that this is done as an internal manipulation within the
13906 constant folder, in order to find the simplest representation of
13907 the arguments so that their form can be studied. In any case,
13908 the appropriate type conversions should be put back in the tree
13909 that will get out of the constant folder. */
13910 if (op0)
13911 {
13912 arg0 = op0;
13913 STRIP_NOPS (arg0);
13914 }
13915
13916 if (op1)
13917 {
13918 arg1 = op1;
13919 STRIP_NOPS (arg1);
13920 }
13921
13922 if (op2)
13923 {
13924 arg2 = op2;
13925 STRIP_NOPS (arg2);
13926 }
13927
13928 switch (code)
13929 {
13930 case COMPONENT_REF:
13931 if (TREE_CODE (arg0) == CONSTRUCTOR
13932 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13933 {
13934 unsigned HOST_WIDE_INT idx;
13935 tree field, value;
13936 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13937 if (field == arg1)
13938 return value;
13939 }
13940 return NULL_TREE;
13941
13942 case COND_EXPR:
13943 case VEC_COND_EXPR:
13944 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13945 so all simple results must be passed through pedantic_non_lvalue. */
13946 if (TREE_CODE (arg0) == INTEGER_CST)
13947 {
13948 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13949 tem = integer_zerop (arg0) ? op2 : op1;
13950 /* Only optimize constant conditions when the selected branch
13951 has the same type as the COND_EXPR. This avoids optimizing
13952 away "c ? x : throw", where the throw has a void type.
13953 Avoid throwing away the operand that contains a label. */
13954 if ((!TREE_SIDE_EFFECTS (unused_op)
13955 || !contains_label_p (unused_op))
13956 && (! VOID_TYPE_P (TREE_TYPE (tem))
13957 || VOID_TYPE_P (type)))
13958 return pedantic_non_lvalue_loc (loc, tem);
13959 return NULL_TREE;
13960 }
13961 else if (TREE_CODE (arg0) == VECTOR_CST)
13962 {
13963 if (integer_all_onesp (arg0))
13964 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13965 if (integer_zerop (arg0))
13966 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13967
13968 if ((TREE_CODE (arg1) == VECTOR_CST
13969 || TREE_CODE (arg1) == CONSTRUCTOR)
13970 && (TREE_CODE (arg2) == VECTOR_CST
13971 || TREE_CODE (arg2) == CONSTRUCTOR))
13972 {
13973 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13974 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13975 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13976 for (i = 0; i < nelts; i++)
13977 {
13978 tree val = VECTOR_CST_ELT (arg0, i);
13979 if (integer_all_onesp (val))
13980 sel[i] = i;
13981 else if (integer_zerop (val))
13982 sel[i] = nelts + i;
13983 else /* Currently unreachable. */
13984 return NULL_TREE;
13985 }
13986 tree t = fold_vec_perm (type, arg1, arg2, sel);
13987 if (t != NULL_TREE)
13988 return t;
13989 }
13990 }
13991
13992 if (operand_equal_p (arg1, op2, 0))
13993 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13994
13995 /* If we have A op B ? A : C, we may be able to convert this to a
13996 simpler expression, depending on the operation and the values
13997 of B and C. Signed zeros prevent all of these transformations,
13998 for reasons given above each one.
13999
14000 Also try swapping the arguments and inverting the conditional. */
14001 if (COMPARISON_CLASS_P (arg0)
14002 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14003 arg1, TREE_OPERAND (arg0, 1))
14004 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14005 {
14006 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14007 if (tem)
14008 return tem;
14009 }
14010
14011 if (COMPARISON_CLASS_P (arg0)
14012 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14013 op2,
14014 TREE_OPERAND (arg0, 1))
14015 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14016 {
14017 location_t loc0 = expr_location_or (arg0, loc);
14018 tem = fold_invert_truthvalue (loc0, arg0);
14019 if (tem && COMPARISON_CLASS_P (tem))
14020 {
14021 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14022 if (tem)
14023 return tem;
14024 }
14025 }
14026
14027 /* If the second operand is simpler than the third, swap them
14028 since that produces better jump optimization results. */
14029 if (truth_value_p (TREE_CODE (arg0))
14030 && tree_swap_operands_p (op1, op2, false))
14031 {
14032 location_t loc0 = expr_location_or (arg0, loc);
14033 /* See if this can be inverted. If it can't, possibly because
14034 it was a floating-point inequality comparison, don't do
14035 anything. */
14036 tem = fold_invert_truthvalue (loc0, arg0);
14037 if (tem)
14038 return fold_build3_loc (loc, code, type, tem, op2, op1);
14039 }
14040
14041 /* Convert A ? 1 : 0 to simply A. */
14042 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14043 : (integer_onep (op1)
14044 && !VECTOR_TYPE_P (type)))
14045 && integer_zerop (op2)
14046 /* If we try to convert OP0 to our type, the
14047 call to fold will try to move the conversion inside
14048 a COND, which will recurse. In that case, the COND_EXPR
14049 is probably the best choice, so leave it alone. */
14050 && type == TREE_TYPE (arg0))
14051 return pedantic_non_lvalue_loc (loc, arg0);
14052
14053 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14054 over COND_EXPR in cases such as floating point comparisons. */
14055 if (integer_zerop (op1)
14056 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14057 : (integer_onep (op2)
14058 && !VECTOR_TYPE_P (type)))
14059 && truth_value_p (TREE_CODE (arg0)))
14060 return pedantic_non_lvalue_loc (loc,
14061 fold_convert_loc (loc, type,
14062 invert_truthvalue_loc (loc,
14063 arg0)));
14064
14065 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
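/* For example, for 32-bit int a: a < 0 ? INT_MIN : 0 folds to
   a & INT_MIN, INT_MIN being the sign-bit mask.  */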
14066 if (TREE_CODE (arg0) == LT_EXPR
14067 && integer_zerop (TREE_OPERAND (arg0, 1))
14068 && integer_zerop (op2)
14069 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14070 {
14071 /* sign_bit_p only checks ARG1 bits within A's precision.
14072 If <sign bit of A> has a wider type than A, bits outside
14073 of A's precision in <sign bit of A> need to be checked.
14074 If they are all 0, this optimization must be done
14075 in unsigned A's type; if they are all 1, in signed A's
14076 type; otherwise it can't be done. */
14077 if (TYPE_PRECISION (TREE_TYPE (tem))
14078 < TYPE_PRECISION (TREE_TYPE (arg1))
14079 && TYPE_PRECISION (TREE_TYPE (tem))
14080 < TYPE_PRECISION (type))
14081 {
14082 wide_int mask;
14083 wide_int wi_arg1 = arg1;
14084 int inner_width, outer_width;
14085 tree tem_type;
14086
14087 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14088 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14089 if (outer_width > TYPE_PRECISION (type))
14090 outer_width = TYPE_PRECISION (type);
14091
14092 mask = wi::shifted_mask
14093 (inner_width, outer_width - inner_width, false,
14094 TYPE_PRECISION (TREE_TYPE (arg1)));
14095
14096 if (wi_arg1 == mask)
14097 {
14098 tem_type = signed_type_for (TREE_TYPE (tem));
14099 tem = fold_convert_loc (loc, tem_type, tem);
14100 }
14101 else if ((wi_arg1 & mask) == 0)
14102 {
14103 tem_type = unsigned_type_for (TREE_TYPE (tem));
14104 tem = fold_convert_loc (loc, tem_type, tem);
14105 }
14106 else
14107 tem = NULL;
14108 }
14109
14110 if (tem)
14111 return
14112 fold_convert_loc (loc, type,
14113 fold_build2_loc (loc, BIT_AND_EXPR,
14114 TREE_TYPE (tem), tem,
14115 fold_convert_loc (loc,
14116 TREE_TYPE (tem),
14117 arg1)));
14118 }
14119
14120 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14121 already handled above. */
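/* For example, ((a >> 3) & 1) ? 8 : 0 folds to a & 8.  */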
14122 if (TREE_CODE (arg0) == BIT_AND_EXPR
14123 && integer_onep (TREE_OPERAND (arg0, 1))
14124 && integer_zerop (op2)
14125 && integer_pow2p (arg1))
14126 {
14127 tree tem = TREE_OPERAND (arg0, 0);
14128 STRIP_NOPS (tem);
14129 if (TREE_CODE (tem) == RSHIFT_EXPR
14130 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14131 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14132 tree_to_uhwi (TREE_OPERAND (tem, 1)))
14133 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14134 TREE_OPERAND (tem, 0), arg1);
14135 }
14136
14137 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14138 is probably obsolete because the first operand should be a
14139 truth value (that's why we have the two cases above), but let's
14140 leave it in until we can confirm this for all front-ends. */
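/* For example, ((a & 8) != 0) ? 8 : 0 folds to a & 8.  */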
14141 if (integer_zerop (op2)
14142 && TREE_CODE (arg0) == NE_EXPR
14143 && integer_zerop (TREE_OPERAND (arg0, 1))
14144 && integer_pow2p (arg1)
14145 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14146 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14147 arg1, OEP_ONLY_CONST))
14148 return pedantic_non_lvalue_loc (loc,
14149 fold_convert_loc (loc, type,
14150 TREE_OPERAND (arg0, 0)));
14151
14152 /* Disable the transformations below for vectors, since
14153 fold_binary_op_with_conditional_arg may undo them immediately,
14154 yielding an infinite loop. */
14155 if (code == VEC_COND_EXPR)
14156 return NULL_TREE;
14157
14158 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14159 if (integer_zerop (op2)
14160 && truth_value_p (TREE_CODE (arg0))
14161 && truth_value_p (TREE_CODE (arg1))
14162 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14163 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14164 : TRUTH_ANDIF_EXPR,
14165 type, fold_convert_loc (loc, type, arg0), arg1);
14166
14167 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14168 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14169 && truth_value_p (TREE_CODE (arg0))
14170 && truth_value_p (TREE_CODE (arg1))
14171 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14172 {
14173 location_t loc0 = expr_location_or (arg0, loc);
14174 /* Only perform transformation if ARG0 is easily inverted. */
14175 tem = fold_invert_truthvalue (loc0, arg0);
14176 if (tem)
14177 return fold_build2_loc (loc, code == VEC_COND_EXPR
14178 ? BIT_IOR_EXPR
14179 : TRUTH_ORIF_EXPR,
14180 type, fold_convert_loc (loc, type, tem),
14181 arg1);
14182 }
14183
14184 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14185 if (integer_zerop (arg1)
14186 && truth_value_p (TREE_CODE (arg0))
14187 && truth_value_p (TREE_CODE (op2))
14188 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14189 {
14190 location_t loc0 = expr_location_or (arg0, loc);
14191 /* Only perform transformation if ARG0 is easily inverted. */
14192 tem = fold_invert_truthvalue (loc0, arg0);
14193 if (tem)
14194 return fold_build2_loc (loc, code == VEC_COND_EXPR
14195 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14196 type, fold_convert_loc (loc, type, tem),
14197 op2);
14198 }
14199
14200 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14201 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14202 && truth_value_p (TREE_CODE (arg0))
14203 && truth_value_p (TREE_CODE (op2))
14204 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14205 return fold_build2_loc (loc, code == VEC_COND_EXPR
14206 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14207 type, fold_convert_loc (loc, type, arg0), op2);
14208
14209 return NULL_TREE;
14210
14211 case CALL_EXPR:
14212 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14213 of fold_ternary on them. */
14214 gcc_unreachable ();
14215
14216 case BIT_FIELD_REF:
14217 if ((TREE_CODE (arg0) == VECTOR_CST
14218 || (TREE_CODE (arg0) == CONSTRUCTOR
14219 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14220 && (type == TREE_TYPE (TREE_TYPE (arg0))
14221 || (TREE_CODE (type) == VECTOR_TYPE
14222 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14223 {
14224 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14225 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14226 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14227 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14228
14229 if (n != 0
14230 && (idx % width) == 0
14231 && (n % width) == 0
14232 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14233 {
14234 idx = idx / width;
14235 n = n / width;
14236
14237 if (TREE_CODE (arg0) == VECTOR_CST)
14238 {
14239 if (n == 1)
14240 return VECTOR_CST_ELT (arg0, idx);
14241
14242 tree *vals = XALLOCAVEC (tree, n);
14243 for (unsigned i = 0; i < n; ++i)
14244 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14245 return build_vector (type, vals);
14246 }
14247
14248 /* Constructor elements can be subvectors. */
14249 unsigned HOST_WIDE_INT k = 1;
14250 if (CONSTRUCTOR_NELTS (arg0) != 0)
14251 {
14252 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14253 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14254 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14255 }
14256
14257 /* We keep an exact subset of the constructor elements. */
14258 if ((idx % k) == 0 && (n % k) == 0)
14259 {
14260 if (CONSTRUCTOR_NELTS (arg0) == 0)
14261 return build_constructor (type, NULL);
14262 idx /= k;
14263 n /= k;
14264 if (n == 1)
14265 {
14266 if (idx < CONSTRUCTOR_NELTS (arg0))
14267 return CONSTRUCTOR_ELT (arg0, idx)->value;
14268 return build_zero_cst (type);
14269 }
14270
14271 vec<constructor_elt, va_gc> *vals;
14272 vec_alloc (vals, n);
14273 for (unsigned i = 0;
14274 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14275 ++i)
14276 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14277 CONSTRUCTOR_ELT
14278 (arg0, idx + i)->value);
14279 return build_constructor (type, vals);
14280 }
14281 /* The bitfield references a single constructor element. */
14282 else if (idx + n <= (idx / k + 1) * k)
14283 {
14284 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14285 return build_zero_cst (type);
14286 else if (n == k)
14287 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14288 else
14289 return fold_build3_loc (loc, code, type,
14290 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14291 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14292 }
14293 }
14294 }
14295
14296 /* A BIT_FIELD_REF that covers the full argument can be stripped. */
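/* For example, BIT_FIELD_REF <x, 32, 0> of a 32-bit integer x is
   just x converted to the result type.  */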
14297 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14298 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14299 && integer_zerop (op2))
14300 return fold_convert_loc (loc, type, arg0);
14301
14302 /* On constants we can use native encode/interpret to constant
14303 fold (nearly) all BIT_FIELD_REFs. */
14304 if (CONSTANT_CLASS_P (arg0)
14305 && can_native_interpret_type_p (type)
14306 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14307 /* This limitation should not be necessary, we just need to
14308 round this up to mode size. */
14309 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14310 /* Need bit-shifting of the buffer to relax the following. */
14311 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14312 {
14313 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14314 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14315 unsigned HOST_WIDE_INT clen;
14316 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14317 /* ??? We cannot tell native_encode_expr to start at
14318 some random byte only. So limit us to a reasonable amount
14319 of work. */
14320 if (clen <= 4096)
14321 {
14322 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14323 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14324 if (len > 0
14325 && len * BITS_PER_UNIT >= bitpos + bitsize)
14326 {
14327 tree v = native_interpret_expr (type,
14328 b + bitpos / BITS_PER_UNIT,
14329 bitsize / BITS_PER_UNIT);
14330 if (v)
14331 return v;
14332 }
14333 }
14334 }
14335
14336 return NULL_TREE;
14337
14338 case FMA_EXPR:
14339 /* For integers we can decompose the FMA if possible. */
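/* For example, FMA_EXPR <2, 3, c> folds to 6 + c, and
   FMA_EXPR <a, b, 0> folds to a * b.  */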
14340 if (TREE_CODE (arg0) == INTEGER_CST
14341 && TREE_CODE (arg1) == INTEGER_CST)
14342 return fold_build2_loc (loc, PLUS_EXPR, type,
14343 const_binop (MULT_EXPR, arg0, arg1), arg2);
14344 if (integer_zerop (arg2))
14345 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14346
14347 return fold_fma (loc, type, arg0, arg1, arg2);
14348
14349 case VEC_PERM_EXPR:
14350 if (TREE_CODE (arg2) == VECTOR_CST)
14351 {
14352 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14353 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14354 bool need_mask_canon = false;
14355 bool all_in_vec0 = true;
14356 bool all_in_vec1 = true;
14357 bool maybe_identity = true;
14358 bool single_arg = (op0 == op1);
14359 bool changed = false;
14360 int nelts_cnt = single_arg ? nelts : nelts * 2;
14361
14362 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14363 for (i = 0; i < nelts; i++)
14364 {
14365 tree val = VECTOR_CST_ELT (arg2, i);
14366 wide_int t;
14367
14368 if (TREE_CODE (val) != INTEGER_CST)
14369 return NULL_TREE;
14370
14371 /* Make sure that the perm value is in an acceptable
14372 range. */
14373 t = val;
14374 if (wi::geu_p (t, nelts_cnt))
14375 {
14376 need_mask_canon = true;
14377 sel[i] = t.to_uhwi () & (nelts_cnt - 1);
14378 }
14379 else
14380 sel[i] = t.to_uhwi ();
14381
14382 if (sel[i] < nelts)
14383 all_in_vec1 = false;
14384 else
14385 all_in_vec0 = false;
14386
14387 if ((sel[i] & (nelts-1)) != i)
14388 maybe_identity = false;
14389 }
14390
14391 if (maybe_identity)
14392 {
14393 if (all_in_vec0)
14394 return op0;
14395 if (all_in_vec1)
14396 return op1;
14397 }
14398
14399 if (all_in_vec0)
14400 op1 = op0;
14401 else if (all_in_vec1)
14402 {
14403 op0 = op1;
14404 for (i = 0; i < nelts; i++)
14405 sel[i] -= nelts;
14406 need_mask_canon = true;
14407 }
14408
14409 if ((TREE_CODE (op0) == VECTOR_CST
14410 || TREE_CODE (op0) == CONSTRUCTOR)
14411 && (TREE_CODE (op1) == VECTOR_CST
14412 || TREE_CODE (op1) == CONSTRUCTOR))
14413 {
14414 tree t = fold_vec_perm (type, op0, op1, sel);
14415 if (t != NULL_TREE)
14416 return t;
14417 }
14418
14419 if (op0 == op1 && !single_arg)
14420 changed = true;
14421
14422 if (need_mask_canon && arg2 == op2)
14423 {
14424 tree *tsel = XALLOCAVEC (tree, nelts);
14425 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14426 for (i = 0; i < nelts; i++)
14427 tsel[i] = build_int_cst (eltype, sel[i]);
14428 op2 = build_vector (TREE_TYPE (arg2), tsel);
14429 changed = true;
14430 }
14431
14432 if (changed)
14433 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14434 }
14435 return NULL_TREE;
14436
14437 default:
14438 return NULL_TREE;
14439 } /* switch (code) */
14440 }
14441
14442 /* Perform constant folding and related simplification of EXPR.
14443 The related simplifications include x*1 => x, x*0 => 0, etc.,
14444 and application of the associative law.
14445 NOP_EXPR conversions may be removed freely (as long as we
14446 are careful not to change the type of the overall expression).
14447 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14448 but we can constant-fold them if they have constant operands. */
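/* For example, fold applied to the tree for x * 1 returns x, and
   applied to the tree for 2 + 3 returns the INTEGER_CST 5.  */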
14449
14450 #ifdef ENABLE_FOLD_CHECKING
14451 # define fold(x) fold_1 (x)
14452 static tree fold_1 (tree);
14453 static
14454 #endif
14455 tree
14456 fold (tree expr)
14457 {
14458 const tree t = expr;
14459 enum tree_code code = TREE_CODE (t);
14460 enum tree_code_class kind = TREE_CODE_CLASS (code);
14461 tree tem;
14462 location_t loc = EXPR_LOCATION (expr);
14463
14464 /* Return right away if a constant. */
14465 if (kind == tcc_constant)
14466 return t;
14467
14468 /* CALL_EXPR-like objects with variable numbers of operands are
14469 treated specially. */
14470 if (kind == tcc_vl_exp)
14471 {
14472 if (code == CALL_EXPR)
14473 {
14474 tem = fold_call_expr (loc, expr, false);
14475 return tem ? tem : expr;
14476 }
14477 return expr;
14478 }
14479
14480 if (IS_EXPR_CODE_CLASS (kind))
14481 {
14482 tree type = TREE_TYPE (t);
14483 tree op0, op1, op2;
14484
14485 switch (TREE_CODE_LENGTH (code))
14486 {
14487 case 1:
14488 op0 = TREE_OPERAND (t, 0);
14489 tem = fold_unary_loc (loc, code, type, op0);
14490 return tem ? tem : expr;
14491 case 2:
14492 op0 = TREE_OPERAND (t, 0);
14493 op1 = TREE_OPERAND (t, 1);
14494 tem = fold_binary_loc (loc, code, type, op0, op1);
14495 return tem ? tem : expr;
14496 case 3:
14497 op0 = TREE_OPERAND (t, 0);
14498 op1 = TREE_OPERAND (t, 1);
14499 op2 = TREE_OPERAND (t, 2);
14500 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14501 return tem ? tem : expr;
14502 default:
14503 break;
14504 }
14505 }
14506
14507 switch (code)
14508 {
14509 case ARRAY_REF:
14510 {
14511 tree op0 = TREE_OPERAND (t, 0);
14512 tree op1 = TREE_OPERAND (t, 1);
14513
14514 if (TREE_CODE (op1) == INTEGER_CST
14515 && TREE_CODE (op0) == CONSTRUCTOR
14516 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14517 {
14518 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14519 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14520 unsigned HOST_WIDE_INT begin = 0;
14521
14522 /* Find a matching index by means of a binary search. */
14523 while (begin != end)
14524 {
14525 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14526 tree index = (*elts)[middle].index;
14527
14528 if (TREE_CODE (index) == INTEGER_CST
14529 && tree_int_cst_lt (index, op1))
14530 begin = middle + 1;
14531 else if (TREE_CODE (index) == INTEGER_CST
14532 && tree_int_cst_lt (op1, index))
14533 end = middle;
14534 else if (TREE_CODE (index) == RANGE_EXPR
14535 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14536 begin = middle + 1;
14537 else if (TREE_CODE (index) == RANGE_EXPR
14538 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14539 end = middle;
14540 else
14541 return (*elts)[middle].value;
14542 }
14543 }
14544
14545 return t;
14546 }
14547
14548 /* Return a VECTOR_CST if possible. */
14549 case CONSTRUCTOR:
14550 {
14551 tree type = TREE_TYPE (t);
14552 if (TREE_CODE (type) != VECTOR_TYPE)
14553 return t;
14554
14555 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14556 unsigned HOST_WIDE_INT idx, pos = 0;
14557 tree value;
14558
14559 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14560 {
14561 if (!CONSTANT_CLASS_P (value))
14562 return t;
14563 if (TREE_CODE (value) == VECTOR_CST)
14564 {
14565 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14566 vec[pos++] = VECTOR_CST_ELT (value, i);
14567 }
14568 else
14569 vec[pos++] = value;
14570 }
14571 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14572 vec[pos] = build_zero_cst (TREE_TYPE (type));
14573
14574 return build_vector (type, vec);
14575 }
14576
14577 case CONST_DECL:
14578 return fold (DECL_INITIAL (t));
14579
14580 default:
14581 return t;
14582 } /* switch (code) */
14583 }
14584
14585 #ifdef ENABLE_FOLD_CHECKING
14586 #undef fold
14587
14588 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14589 hash_table <pointer_hash <tree_node> >);
14590 static void fold_check_failed (const_tree, const_tree);
14591 void print_fold_checksum (const_tree);
14592
14593 /* When --enable-checking=fold, compute a digest of expr before
14594 and after the actual fold call to verify that fold did not
14595 accidentally change the original expr. */
14596
14597 tree
14598 fold (tree expr)
14599 {
14600 tree ret;
14601 struct md5_ctx ctx;
14602 unsigned char checksum_before[16], checksum_after[16];
14603 hash_table <pointer_hash <tree_node> > ht;
14604
14605 ht.create (32);
14606 md5_init_ctx (&ctx);
14607 fold_checksum_tree (expr, &ctx, ht);
14608 md5_finish_ctx (&ctx, checksum_before);
14609 ht.empty ();
14610
14611 ret = fold_1 (expr);
14612
14613 md5_init_ctx (&ctx);
14614 fold_checksum_tree (expr, &ctx, ht);
14615 md5_finish_ctx (&ctx, checksum_after);
14616 ht.dispose ();
14617
14618 if (memcmp (checksum_before, checksum_after, 16))
14619 fold_check_failed (expr, ret);
14620
14621 return ret;
14622 }
14623
14624 void
14625 print_fold_checksum (const_tree expr)
14626 {
14627 struct md5_ctx ctx;
14628 unsigned char checksum[16], cnt;
14629 hash_table <pointer_hash <tree_node> > ht;
14630
14631 ht.create (32);
14632 md5_init_ctx (&ctx);
14633 fold_checksum_tree (expr, &ctx, ht);
14634 md5_finish_ctx (&ctx, checksum);
14635 ht.dispose ();
14636 for (cnt = 0; cnt < 16; ++cnt)
14637 fprintf (stderr, "%02x", checksum[cnt]);
14638 putc ('\n', stderr);
14639 }
14640
14641 static void
14642 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14643 {
14644 internal_error ("fold check: original tree changed by fold");
14645 }
14646
14647 static void
14648 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14649 hash_table <pointer_hash <tree_node> > ht)
14650 {
14651 tree_node **slot;
14652 enum tree_code code;
14653 union tree_node buf;
14654 int i, len;
14655
14656 recursive_label:
14657 if (expr == NULL)
14658 return;
14659 slot = ht.find_slot (expr, INSERT);
14660 if (*slot != NULL)
14661 return;
14662 *slot = CONST_CAST_TREE (expr);
14663 code = TREE_CODE (expr);
14664 if (TREE_CODE_CLASS (code) == tcc_declaration
14665 && DECL_ASSEMBLER_NAME_SET_P (expr))
14666 {
14667 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14668 memcpy ((char *) &buf, expr, tree_size (expr));
14669 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14670 expr = (tree) &buf;
14671 }
14672 else if (TREE_CODE_CLASS (code) == tcc_type
14673 && (TYPE_POINTER_TO (expr)
14674 || TYPE_REFERENCE_TO (expr)
14675 || TYPE_CACHED_VALUES_P (expr)
14676 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14677 || TYPE_NEXT_VARIANT (expr)))
14678 {
14679 /* Allow these fields to be modified. */
14680 tree tmp;
14681 memcpy ((char *) &buf, expr, tree_size (expr));
14682 expr = tmp = (tree) &buf;
14683 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14684 TYPE_POINTER_TO (tmp) = NULL;
14685 TYPE_REFERENCE_TO (tmp) = NULL;
14686 TYPE_NEXT_VARIANT (tmp) = NULL;
14687 if (TYPE_CACHED_VALUES_P (tmp))
14688 {
14689 TYPE_CACHED_VALUES_P (tmp) = 0;
14690 TYPE_CACHED_VALUES (tmp) = NULL;
14691 }
14692 }
14693 md5_process_bytes (expr, tree_size (expr), ctx);
14694 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14695 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14696 if (TREE_CODE_CLASS (code) != tcc_type
14697 && TREE_CODE_CLASS (code) != tcc_declaration
14698 && code != TREE_LIST
14699 && code != SSA_NAME
14700 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14701 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14702 switch (TREE_CODE_CLASS (code))
14703 {
14704 case tcc_constant:
14705 switch (code)
14706 {
14707 case STRING_CST:
14708 md5_process_bytes (TREE_STRING_POINTER (expr),
14709 TREE_STRING_LENGTH (expr), ctx);
14710 break;
14711 case COMPLEX_CST:
14712 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14713 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14714 break;
14715 case VECTOR_CST:
14716 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14717 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14718 break;
14719 default:
14720 break;
14721 }
14722 break;
14723 case tcc_exceptional:
14724 switch (code)
14725 {
14726 case TREE_LIST:
14727 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14728 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14729 expr = TREE_CHAIN (expr);
14730 goto recursive_label;
14731 break;
14732 case TREE_VEC:
14733 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14734 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14735 break;
14736 default:
14737 break;
14738 }
14739 break;
14740 case tcc_expression:
14741 case tcc_reference:
14742 case tcc_comparison:
14743 case tcc_unary:
14744 case tcc_binary:
14745 case tcc_statement:
14746 case tcc_vl_exp:
14747 len = TREE_OPERAND_LENGTH (expr);
14748 for (i = 0; i < len; ++i)
14749 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14750 break;
14751 case tcc_declaration:
14752 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14753 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14754 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14755 {
14756 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14757 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14758 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14759 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14760 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14761 }
14762 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14763 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14764
14765 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14766 {
14767 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14768 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14769 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14770 }
14771 break;
14772 case tcc_type:
14773 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14774 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14775 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14776 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14777 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14778 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14779 if (INTEGRAL_TYPE_P (expr)
14780 || SCALAR_FLOAT_TYPE_P (expr))
14781 {
14782 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14783 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14784 }
14785 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14786 if (TREE_CODE (expr) == RECORD_TYPE
14787 || TREE_CODE (expr) == UNION_TYPE
14788 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14789 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14790 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14791 break;
14792 default:
14793 break;
14794 }
14795 }
14796
14797 /* Helper function for outputting the checksum of a tree T. When
14798 debugging with gdb, you can "define mynext" to be "next" followed
14799 by "call debug_fold_checksum (op0)", then just trace down till the
14800 outputs differ. */
14801
14802 DEBUG_FUNCTION void
14803 debug_fold_checksum (const_tree t)
14804 {
14805 int i;
14806 unsigned char checksum[16];
14807 struct md5_ctx ctx;
14808 hash_table <pointer_hash <tree_node> > ht;
14809 ht.create (32);
14810
14811 md5_init_ctx (&ctx);
14812 fold_checksum_tree (t, &ctx, ht);
14813 md5_finish_ctx (&ctx, checksum);
14814 ht.empty ();
14815
14816 for (i = 0; i < 16; i++)
14817 fprintf (stderr, "%d ", checksum[i]);
14818
14819 fprintf (stderr, "\n");
14820 }
14821
14822 #endif
14823
14824 /* Fold a unary tree expression with code CODE of type TYPE with an
14825 operand OP0. LOC is the location of the resulting expression.
14826 Return a folded expression if successful. Otherwise, return a tree
14827 expression with code CODE of type TYPE with an operand OP0. */
14828
14829 tree
14830 fold_build1_stat_loc (location_t loc,
14831 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14832 {
14833 tree tem;
14834 #ifdef ENABLE_FOLD_CHECKING
14835 unsigned char checksum_before[16], checksum_after[16];
14836 struct md5_ctx ctx;
14837 hash_table <pointer_hash <tree_node> > ht;
14838
14839 ht.create (32);
14840 md5_init_ctx (&ctx);
14841 fold_checksum_tree (op0, &ctx, ht);
14842 md5_finish_ctx (&ctx, checksum_before);
14843 ht.empty ();
14844 #endif
14845
14846 tem = fold_unary_loc (loc, code, type, op0);
14847 if (!tem)
14848 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14849
14850 #ifdef ENABLE_FOLD_CHECKING
14851 md5_init_ctx (&ctx);
14852 fold_checksum_tree (op0, &ctx, ht);
14853 md5_finish_ctx (&ctx, checksum_after);
14854 ht.dispose ();
14855
14856 if (memcmp (checksum_before, checksum_after, 16))
14857 fold_check_failed (op0, tem);
14858 #endif
14859 return tem;
14860 }
14861
14862 /* Fold a binary tree expression with code CODE of type TYPE with
14863 operands OP0 and OP1. LOC is the location of the resulting
14864 expression. Return a folded expression if successful. Otherwise,
14865 return a tree expression with code CODE of type TYPE with operands
14866 OP0 and OP1. */
14867
14868 tree
14869 fold_build2_stat_loc (location_t loc,
14870 enum tree_code code, tree type, tree op0, tree op1
14871 MEM_STAT_DECL)
14872 {
14873 tree tem;
14874 #ifdef ENABLE_FOLD_CHECKING
14875 unsigned char checksum_before_op0[16],
14876 checksum_before_op1[16],
14877 checksum_after_op0[16],
14878 checksum_after_op1[16];
14879 struct md5_ctx ctx;
14880 hash_table <pointer_hash <tree_node> > ht;
14881
14882 ht.create (32);
14883 md5_init_ctx (&ctx);
14884 fold_checksum_tree (op0, &ctx, ht);
14885 md5_finish_ctx (&ctx, checksum_before_op0);
14886 ht.empty ();
14887
14888 md5_init_ctx (&ctx);
14889 fold_checksum_tree (op1, &ctx, ht);
14890 md5_finish_ctx (&ctx, checksum_before_op1);
14891 ht.empty ();
14892 #endif
14893
14894 tem = fold_binary_loc (loc, code, type, op0, op1);
14895 if (!tem)
14896 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14897
14898 #ifdef ENABLE_FOLD_CHECKING
14899 md5_init_ctx (&ctx);
14900 fold_checksum_tree (op0, &ctx, ht);
14901 md5_finish_ctx (&ctx, checksum_after_op0);
14902 ht.empty ();
14903
14904 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14905 fold_check_failed (op0, tem);
14906
14907 md5_init_ctx (&ctx);
14908 fold_checksum_tree (op1, &ctx, ht);
14909 md5_finish_ctx (&ctx, checksum_after_op1);
14910 ht.dispose ();
14911
14912 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14913 fold_check_failed (op1, tem);
14914 #endif
14915 return tem;
14916 }
14917
14918 /* Fold a ternary tree expression with code CODE of type TYPE with
14919 operands OP0, OP1, and OP2. Return a folded expression if
14920 successful. Otherwise, return a tree expression with code CODE of
14921 type TYPE with operands OP0, OP1, and OP2. */
14922
14923 tree
14924 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14925 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14926 {
14927 tree tem;
14928 #ifdef ENABLE_FOLD_CHECKING
14929 unsigned char checksum_before_op0[16],
14930 checksum_before_op1[16],
14931 checksum_before_op2[16],
14932 checksum_after_op0[16],
14933 checksum_after_op1[16],
14934 checksum_after_op2[16];
14935 struct md5_ctx ctx;
14936 hash_table <pointer_hash <tree_node> > ht;
14937
14938 ht.create (32);
14939 md5_init_ctx (&ctx);
14940 fold_checksum_tree (op0, &ctx, ht);
14941 md5_finish_ctx (&ctx, checksum_before_op0);
14942 ht.empty ();
14943
14944 md5_init_ctx (&ctx);
14945 fold_checksum_tree (op1, &ctx, ht);
14946 md5_finish_ctx (&ctx, checksum_before_op1);
14947 ht.empty ();
14948
14949 md5_init_ctx (&ctx);
14950 fold_checksum_tree (op2, &ctx, ht);
14951 md5_finish_ctx (&ctx, checksum_before_op2);
14952 ht.empty ();
14953 #endif
14954
14955 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14956 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14957 if (!tem)
14958 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14959
14960 #ifdef ENABLE_FOLD_CHECKING
14961 md5_init_ctx (&ctx);
14962 fold_checksum_tree (op0, &ctx, ht);
14963 md5_finish_ctx (&ctx, checksum_after_op0);
14964 ht.empty ();
14965
14966 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14967 fold_check_failed (op0, tem);
14968
14969 md5_init_ctx (&ctx);
14970 fold_checksum_tree (op1, &ctx, ht);
14971 md5_finish_ctx (&ctx, checksum_after_op1);
14972 ht.empty ();
14973
14974 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14975 fold_check_failed (op1, tem);
14976
14977 md5_init_ctx (&ctx);
14978 fold_checksum_tree (op2, &ctx, ht);
14979 md5_finish_ctx (&ctx, checksum_after_op2);
14980 ht.dispose ();
14981
14982 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14983 fold_check_failed (op2, tem);
14984 #endif
14985 return tem;
14986 }
14987
14988 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14989 arguments in ARGARRAY, and a null static chain.
14990 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14991 of type TYPE from the given operands as constructed by build_call_array. */
14992
14993 tree
14994 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14995 int nargs, tree *argarray)
14996 {
14997 tree tem;
14998 #ifdef ENABLE_FOLD_CHECKING
14999 unsigned char checksum_before_fn[16],
15000 checksum_before_arglist[16],
15001 checksum_after_fn[16],
15002 checksum_after_arglist[16];
15003 struct md5_ctx ctx;
15004 hash_table <pointer_hash <tree_node> > ht;
15005 int i;
15006
15007 ht.create (32);
15008 md5_init_ctx (&ctx);
15009 fold_checksum_tree (fn, &ctx, ht);
15010 md5_finish_ctx (&ctx, checksum_before_fn);
15011 ht.empty ();
15012
15013 md5_init_ctx (&ctx);
15014 for (i = 0; i < nargs; i++)
15015 fold_checksum_tree (argarray[i], &ctx, ht);
15016 md5_finish_ctx (&ctx, checksum_before_arglist);
15017 ht.empty ();
15018 #endif
15019
15020 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15021
15022 #ifdef ENABLE_FOLD_CHECKING
15023 md5_init_ctx (&ctx);
15024 fold_checksum_tree (fn, &ctx, ht);
15025 md5_finish_ctx (&ctx, checksum_after_fn);
15026 ht.empty ();
15027
15028 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15029 fold_check_failed (fn, tem);
15030
15031 md5_init_ctx (&ctx);
15032 for (i = 0; i < nargs; i++)
15033 fold_checksum_tree (argarray[i], &ctx, ht);
15034 md5_finish_ctx (&ctx, checksum_after_arglist);
15035 ht.dispose ();
15036
15037 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15038 fold_check_failed (NULL_TREE, tem);
15039 #endif
15040 return tem;
15041 }
15042
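/* Illustrative sketch, not part of GCC: folding a builtin call
   through fold_build_call_array_loc.  builtin_decl_explicit,
   build_fold_addr_expr_loc, build_real and dconstm1 are real GCC
   entities; the wrapper function is hypothetical.  */

static tree
example_fold_fabs_call (void)
{
  tree fn = build_fold_addr_expr_loc
    (UNKNOWN_LOCATION, builtin_decl_explicit (BUILT_IN_FABS));
  tree arg = build_real (double_type_node, dconstm1);

  /* fold_builtin_call_array evaluates fabs (-1.0) at compile time,
     so a REAL_CST 1.0 comes back rather than a CALL_EXPR.  */
  return fold_build_call_array_loc (UNKNOWN_LOCATION, double_type_node,
                                    fn, 1, &arg);
}
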
15043 /* Perform constant folding and related simplification of initializer
15044 expression EXPR. These behave identically to "fold_buildN" but ignore
15045 potential run-time traps and exceptions that fold must preserve. */
15046
15047 #define START_FOLD_INIT \
15048 int saved_signaling_nans = flag_signaling_nans;\
15049 int saved_trapping_math = flag_trapping_math;\
15050 int saved_rounding_math = flag_rounding_math;\
15051 int saved_trapv = flag_trapv;\
15052 int saved_folding_initializer = folding_initializer;\
15053 flag_signaling_nans = 0;\
15054 flag_trapping_math = 0;\
15055 flag_rounding_math = 0;\
15056 flag_trapv = 0;\
15057 folding_initializer = 1;
15058
15059 #define END_FOLD_INIT \
15060 flag_signaling_nans = saved_signaling_nans;\
15061 flag_trapping_math = saved_trapping_math;\
15062 flag_rounding_math = saved_rounding_math;\
15063 flag_trapv = saved_trapv;\
15064 folding_initializer = saved_folding_initializer;
15065
15066 tree
15067 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15068 tree type, tree op)
15069 {
15070 tree result;
15071 START_FOLD_INIT;
15072
15073 result = fold_build1_loc (loc, code, type, op);
15074
15075 END_FOLD_INIT;
15076 return result;
15077 }
15078
15079 tree
15080 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15081 tree type, tree op0, tree op1)
15082 {
15083 tree result;
15084 START_FOLD_INIT;
15085
15086 result = fold_build2_loc (loc, code, type, op0, op1);
15087
15088 END_FOLD_INIT;
15089 return result;
15090 }
15091
15092 tree
15093 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15094 tree type, tree op0, tree op1, tree op2)
15095 {
15096 tree result;
15097 START_FOLD_INIT;
15098
15099 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15100
15101 END_FOLD_INIT;
15102 return result;
15103 }
15104
15105 tree
15106 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15107 int nargs, tree *argarray)
15108 {
15109 tree result;
15110 START_FOLD_INIT;
15111
15112 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15113
15114 END_FOLD_INIT;
15115 return result;
15116 }
15117
15118 #undef START_FOLD_INIT
15119 #undef END_FOLD_INIT
15120
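/* Illustrative sketch, not part of GCC: how a front end might fold a
   static initializer.  Within the call, the trapping/rounding flags
   saved by START_FOLD_INIT are cleared, so a constant FP addition
   folds even when it could raise an exception at run time; the
   wrapper function is hypothetical.  */

static tree
example_fold_initializer (tree op0, tree op1)
{
  return fold_build2_initializer_loc (UNKNOWN_LOCATION, PLUS_EXPR,
                                      double_type_node, op0, op1);
}
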
15121 /* Determine if first argument is a multiple of second argument. Return 0 if
15122 it is not, or if we cannot easily determine that it is.
15123
15124 An example of the sort of thing we care about (at this point; this routine
15125 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15126 fold cases do now) is discovering that
15127
15128 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15129
15130 is a multiple of
15131
15132 SAVE_EXPR (J * 8)
15133
15134 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15135
15136 This code also handles discovering that
15137
15138 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15139
15140 is a multiple of 8 so we don't have to worry about dealing with a
15141 possible remainder.
15142
15143 Note that we *look* inside a SAVE_EXPR only to determine how it was
15144 calculated; it is not safe for fold to do much of anything else with the
15145 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15146 at run time. For example, the latter example above *cannot* be implemented
15147 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15148 evaluation time of the original SAVE_EXPR is not necessarily the same as at
15149 the time the new expression is evaluated. The only optimization of this
15150 sort that would be valid is changing
15151
15152 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15153
15154 divided by 8 to
15155
15156 SAVE_EXPR (I) * SAVE_EXPR (J)
15157
15158 (where the same SAVE_EXPR (J) is used in the original and the
15159 transformed version). */
15160
15161 int
15162 multiple_of_p (tree type, const_tree top, const_tree bottom)
15163 {
15164 if (operand_equal_p (top, bottom, 0))
15165 return 1;
15166
15167 if (TREE_CODE (type) != INTEGER_TYPE)
15168 return 0;
15169
15170 switch (TREE_CODE (top))
15171 {
15172 case BIT_AND_EXPR:
15173 /* Bitwise and provides a power of two multiple. If the mask is
15174 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15175 if (!integer_pow2p (bottom))
15176 return 0;
15177 /* FALLTHRU */
15178
15179 case MULT_EXPR:
15180 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15181 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15182
15183 case PLUS_EXPR:
15184 case MINUS_EXPR:
15185 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15186 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15187
15188 case LSHIFT_EXPR:
15189 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15190 {
15191 tree op1, t1;
15192
15193 op1 = TREE_OPERAND (top, 1);
15194 /* const_binop may not detect overflow correctly,
15195 so check for it explicitly here. */
15196 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15197 && 0 != (t1 = fold_convert (type,
15198 const_binop (LSHIFT_EXPR,
15199 size_one_node,
15200 op1)))
15201 && !TREE_OVERFLOW (t1))
15202 return multiple_of_p (type, t1, bottom);
15203 }
15204 return 0;
15205
15206 case NOP_EXPR:
15207 /* Can't handle conversions from non-integral or wider integral type. */
15208 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15209 || (TYPE_PRECISION (type)
15210 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15211 return 0;
15212
15213 /* ... fall through ... */
15214
15215 case SAVE_EXPR:
15216 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15217
15218 case COND_EXPR:
15219 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15220 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15221
15222 case INTEGER_CST:
15223 if (TREE_CODE (bottom) != INTEGER_CST
15224 || integer_zerop (bottom)
15225 || (TYPE_UNSIGNED (type)
15226 && (tree_int_cst_sgn (top) < 0
15227 || tree_int_cst_sgn (bottom) < 0)))
15228 return 0;
15229 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15230 top, bottom));
15231
15232 default:
15233 return 0;
15234 }
15235 }
15236
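/* Illustrative sketch, not part of GCC: two easy multiple_of_p
   queries.  size_int and build2 are real GCC entities; the wrapper
   function (and its operand X) is hypothetical.  */

static bool
example_multiple_of_8 (tree x)
{
  /* 24 is a multiple of 8: the INTEGER_CST case computes 24 % 8.  */
  if (!multiple_of_p (sizetype, size_int (24), size_int (8)))
    return false;

  /* X << 3 is a multiple of 8 for any X, via the LSHIFT_EXPR case
     (1 << 3 == 8, and 8 is trivially a multiple of 8).  */
  tree shifted = build2 (LSHIFT_EXPR, sizetype, x, size_int (3));
  return multiple_of_p (sizetype, shifted, size_int (8)) != 0;
}
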
15237 /* Return true if CODE or TYPE is known to be non-negative. */
15238
15239 static bool
15240 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15241 {
15242 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15243 && truth_value_p (code))
15244 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15245 have a signed:1 type (where the values are -1 and 0). */
15246 return true;
15247 return false;
15248 }
15249
15250 /* Return true if (CODE OP0) is known to be non-negative. If the return
15251 value is based on the assumption that signed overflow is undefined,
15252 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15253 *STRICT_OVERFLOW_P. */
15254
15255 bool
15256 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15257 bool *strict_overflow_p)
15258 {
15259 if (TYPE_UNSIGNED (type))
15260 return true;
15261
15262 switch (code)
15263 {
15264 case ABS_EXPR:
15265 /* We can't return 1 if flag_wrapv is set because
15266 ABS_EXPR<INT_MIN> = INT_MIN. */
15267 if (!INTEGRAL_TYPE_P (type))
15268 return true;
15269 if (TYPE_OVERFLOW_UNDEFINED (type))
15270 {
15271 *strict_overflow_p = true;
15272 return true;
15273 }
15274 break;
15275
15276 case NON_LVALUE_EXPR:
15277 case FLOAT_EXPR:
15278 case FIX_TRUNC_EXPR:
15279 return tree_expr_nonnegative_warnv_p (op0,
15280 strict_overflow_p);
15281
15282 case NOP_EXPR:
15283 {
15284 tree inner_type = TREE_TYPE (op0);
15285 tree outer_type = type;
15286
15287 if (TREE_CODE (outer_type) == REAL_TYPE)
15288 {
15289 if (TREE_CODE (inner_type) == REAL_TYPE)
15290 return tree_expr_nonnegative_warnv_p (op0,
15291 strict_overflow_p);
15292 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15293 {
15294 if (TYPE_UNSIGNED (inner_type))
15295 return true;
15296 return tree_expr_nonnegative_warnv_p (op0,
15297 strict_overflow_p);
15298 }
15299 }
15300 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15301 {
15302 if (TREE_CODE (inner_type) == REAL_TYPE)
15303 return tree_expr_nonnegative_warnv_p (op0,
15304 strict_overflow_p);
15305 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15306 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15307 && TYPE_UNSIGNED (inner_type);
15308 }
15309 }
15310 break;
15311
15312 default:
15313 return tree_simple_nonnegative_warnv_p (code, type);
15314 }
15315
15316 /* We don't know sign of `t', so be conservative and return false. */
15317 return false;
15318 }
15319
15320 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15321 value is based on the assumption that signed overflow is undefined,
15322 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15323 *STRICT_OVERFLOW_P. */
15324
15325 bool
15326 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15327 tree op1, bool *strict_overflow_p)
15328 {
15329 if (TYPE_UNSIGNED (type))
15330 return true;
15331
15332 switch (code)
15333 {
15334 case POINTER_PLUS_EXPR:
15335 case PLUS_EXPR:
15336 if (FLOAT_TYPE_P (type))
15337 return (tree_expr_nonnegative_warnv_p (op0,
15338 strict_overflow_p)
15339 && tree_expr_nonnegative_warnv_p (op1,
15340 strict_overflow_p));
15341
15342 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15343 both unsigned and at least 2 bits shorter than the result. */
15344 if (TREE_CODE (type) == INTEGER_TYPE
15345 && TREE_CODE (op0) == NOP_EXPR
15346 && TREE_CODE (op1) == NOP_EXPR)
15347 {
15348 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15349 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15350 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15351 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15352 {
15353 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15354 TYPE_PRECISION (inner2)) + 1;
15355 return prec < TYPE_PRECISION (type);
15356 }
15357 }
15358 break;
15359
15360 case MULT_EXPR:
15361 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15362 {
15363 /* x * x is always non-negative for floating point x
15364 or without overflow. */
15365 if (operand_equal_p (op0, op1, 0)
15366 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15367 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15368 {
15369 if (TYPE_OVERFLOW_UNDEFINED (type))
15370 *strict_overflow_p = true;
15371 return true;
15372 }
15373 }
15374
15375 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15376 both unsigned and the sum of their precisions is less than that of the result. */
15377 if (TREE_CODE (type) == INTEGER_TYPE
15378 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15379 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15380 {
15381 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15382 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15383 : TREE_TYPE (op0);
15384 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15385 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15386 : TREE_TYPE (op1);
15387
15388 bool unsigned0 = TYPE_UNSIGNED (inner0);
15389 bool unsigned1 = TYPE_UNSIGNED (inner1);
15390
15391 if (TREE_CODE (op0) == INTEGER_CST)
15392 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15393
15394 if (TREE_CODE (op1) == INTEGER_CST)
15395 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15396
15397 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15398 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15399 {
15400 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15401 ? tree_int_cst_min_precision (op0, UNSIGNED)
15402 : TYPE_PRECISION (inner0);
15403
15404 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15405 ? tree_int_cst_min_precision (op1, UNSIGNED)
15406 : TYPE_PRECISION (inner1);
15407
15408 return precision0 + precision1 < TYPE_PRECISION (type);
15409 }
15410 }
15411 return false;
15412
15413 case BIT_AND_EXPR:
15414 case MAX_EXPR:
15415 return (tree_expr_nonnegative_warnv_p (op0,
15416 strict_overflow_p)
15417 || tree_expr_nonnegative_warnv_p (op1,
15418 strict_overflow_p));
15419
15420 case BIT_IOR_EXPR:
15421 case BIT_XOR_EXPR:
15422 case MIN_EXPR:
15423 case RDIV_EXPR:
15424 case TRUNC_DIV_EXPR:
15425 case CEIL_DIV_EXPR:
15426 case FLOOR_DIV_EXPR:
15427 case ROUND_DIV_EXPR:
15428 return (tree_expr_nonnegative_warnv_p (op0,
15429 strict_overflow_p)
15430 && tree_expr_nonnegative_warnv_p (op1,
15431 strict_overflow_p));
15432
15433 case TRUNC_MOD_EXPR:
15434 case CEIL_MOD_EXPR:
15435 case FLOOR_MOD_EXPR:
15436 case ROUND_MOD_EXPR:
15437 return tree_expr_nonnegative_warnv_p (op0,
15438 strict_overflow_p);
15439 default:
15440 return tree_simple_nonnegative_warnv_p (code, type);
15441 }
15442
15443 /* We don't know sign of `t', so be conservative and return false. */
15444 return false;
15445 }
15446
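/* Illustrative sketch, not part of GCC: X * X is recognized as
   non-negative for a signed type with undefined overflow because the
   operands compare operand_equal_p; the query records that it leaned
   on the no-overflow assumption.  The wrapper is hypothetical.  */

static bool
example_square_is_nonnegative (tree x)
{
  bool strict_ovf = false;
  bool ok = tree_binary_nonnegative_warnv_p (MULT_EXPR, TREE_TYPE (x),
                                             x, x, &strict_ovf);
  /* A real caller would funnel STRICT_OVF into
     fold_overflow_warning, as tree_expr_nonnegative_p does below.  */
  return ok && strict_ovf;
}
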
15447 /* Return true if T is known to be non-negative. If the return
15448 value is based on the assumption that signed overflow is undefined,
15449 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15450 *STRICT_OVERFLOW_P. */
15451
15452 bool
15453 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15454 {
15455 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15456 return true;
15457
15458 switch (TREE_CODE (t))
15459 {
15460 case INTEGER_CST:
15461 return tree_int_cst_sgn (t) >= 0;
15462
15463 case REAL_CST:
15464 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15465
15466 case FIXED_CST:
15467 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15468
15469 case COND_EXPR:
15470 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15471 strict_overflow_p)
15472 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15473 strict_overflow_p));
15474 default:
15475 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15476 TREE_TYPE (t));
15477 }
15478 /* We don't know sign of `t', so be conservative and return false. */
15479 return false;
15480 }
15481
15482 /* Return true if T is known to be non-negative. If the return
15483 value is based on the assumption that signed overflow is undefined,
15484 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15485 *STRICT_OVERFLOW_P. */
15486
15487 bool
15488 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15489 tree arg0, tree arg1, bool *strict_overflow_p)
15490 {
15491 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15492 switch (DECL_FUNCTION_CODE (fndecl))
15493 {
15494 CASE_FLT_FN (BUILT_IN_ACOS):
15495 CASE_FLT_FN (BUILT_IN_ACOSH):
15496 CASE_FLT_FN (BUILT_IN_CABS):
15497 CASE_FLT_FN (BUILT_IN_COSH):
15498 CASE_FLT_FN (BUILT_IN_ERFC):
15499 CASE_FLT_FN (BUILT_IN_EXP):
15500 CASE_FLT_FN (BUILT_IN_EXP10):
15501 CASE_FLT_FN (BUILT_IN_EXP2):
15502 CASE_FLT_FN (BUILT_IN_FABS):
15503 CASE_FLT_FN (BUILT_IN_FDIM):
15504 CASE_FLT_FN (BUILT_IN_HYPOT):
15505 CASE_FLT_FN (BUILT_IN_POW10):
15506 CASE_INT_FN (BUILT_IN_FFS):
15507 CASE_INT_FN (BUILT_IN_PARITY):
15508 CASE_INT_FN (BUILT_IN_POPCOUNT):
15509 CASE_INT_FN (BUILT_IN_CLZ):
15510 CASE_INT_FN (BUILT_IN_CLRSB):
15511 case BUILT_IN_BSWAP32:
15512 case BUILT_IN_BSWAP64:
15513 /* Always true. */
15514 return true;
15515
15516 CASE_FLT_FN (BUILT_IN_SQRT):
15517 /* sqrt(-0.0) is -0.0. */
15518 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15519 return true;
15520 return tree_expr_nonnegative_warnv_p (arg0,
15521 strict_overflow_p);
15522
15523 CASE_FLT_FN (BUILT_IN_ASINH):
15524 CASE_FLT_FN (BUILT_IN_ATAN):
15525 CASE_FLT_FN (BUILT_IN_ATANH):
15526 CASE_FLT_FN (BUILT_IN_CBRT):
15527 CASE_FLT_FN (BUILT_IN_CEIL):
15528 CASE_FLT_FN (BUILT_IN_ERF):
15529 CASE_FLT_FN (BUILT_IN_EXPM1):
15530 CASE_FLT_FN (BUILT_IN_FLOOR):
15531 CASE_FLT_FN (BUILT_IN_FMOD):
15532 CASE_FLT_FN (BUILT_IN_FREXP):
15533 CASE_FLT_FN (BUILT_IN_ICEIL):
15534 CASE_FLT_FN (BUILT_IN_IFLOOR):
15535 CASE_FLT_FN (BUILT_IN_IRINT):
15536 CASE_FLT_FN (BUILT_IN_IROUND):
15537 CASE_FLT_FN (BUILT_IN_LCEIL):
15538 CASE_FLT_FN (BUILT_IN_LDEXP):
15539 CASE_FLT_FN (BUILT_IN_LFLOOR):
15540 CASE_FLT_FN (BUILT_IN_LLCEIL):
15541 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15542 CASE_FLT_FN (BUILT_IN_LLRINT):
15543 CASE_FLT_FN (BUILT_IN_LLROUND):
15544 CASE_FLT_FN (BUILT_IN_LRINT):
15545 CASE_FLT_FN (BUILT_IN_LROUND):
15546 CASE_FLT_FN (BUILT_IN_MODF):
15547 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15548 CASE_FLT_FN (BUILT_IN_RINT):
15549 CASE_FLT_FN (BUILT_IN_ROUND):
15550 CASE_FLT_FN (BUILT_IN_SCALB):
15551 CASE_FLT_FN (BUILT_IN_SCALBLN):
15552 CASE_FLT_FN (BUILT_IN_SCALBN):
15553 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15554 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15555 CASE_FLT_FN (BUILT_IN_SINH):
15556 CASE_FLT_FN (BUILT_IN_TANH):
15557 CASE_FLT_FN (BUILT_IN_TRUNC):
15558 /* True if the 1st argument is nonnegative. */
15559 return tree_expr_nonnegative_warnv_p (arg0,
15560 strict_overflow_p);
15561
15562 CASE_FLT_FN (BUILT_IN_FMAX):
15563 /* True if either the 1st or the 2nd argument is nonnegative. */
15564 return (tree_expr_nonnegative_warnv_p (arg0,
15565 strict_overflow_p)
15566 || (tree_expr_nonnegative_warnv_p (arg1,
15567 strict_overflow_p)));
15568
15569 CASE_FLT_FN (BUILT_IN_FMIN):
15570 /* True if both the 1st and the 2nd arguments are nonnegative. */
15571 return (tree_expr_nonnegative_warnv_p (arg0,
15572 strict_overflow_p)
15573 && (tree_expr_nonnegative_warnv_p (arg1,
15574 strict_overflow_p)));
15575
15576 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15577 /* True if the 2nd argument is nonnegative. */
15578 return tree_expr_nonnegative_warnv_p (arg1,
15579 strict_overflow_p);
15580
15581 CASE_FLT_FN (BUILT_IN_POWI):
15582 /* True if the 1st argument is nonnegative or the second
15583 argument is an even integer. */
15584 if (TREE_CODE (arg1) == INTEGER_CST
15585 && (tree_to_hwi (arg1) & 1) == 0)
15586 return true;
15587 return tree_expr_nonnegative_warnv_p (arg0,
15588 strict_overflow_p);
15589
15590 CASE_FLT_FN (BUILT_IN_POW):
15591 /* True if the 1st argument is nonnegative or the second
15592 argument is an even integer valued real. */
15593 if (TREE_CODE (arg1) == REAL_CST)
15594 {
15595 REAL_VALUE_TYPE c;
15596 HOST_WIDE_INT n;
15597
15598 c = TREE_REAL_CST (arg1);
15599 n = real_to_integer (&c);
15600 if ((n & 1) == 0)
15601 {
15602 REAL_VALUE_TYPE cint;
15603 real_from_integer (&cint, VOIDmode, n, SIGNED);
15604 if (real_identical (&c, &cint))
15605 return true;
15606 }
15607 }
15608 return tree_expr_nonnegative_warnv_p (arg0,
15609 strict_overflow_p);
15610
15611 default:
15612 break;
15613 }
15614 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15615 type);
15616 }
15617
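/* Illustrative sketch, not part of GCC: pow (x, 2.0) is recognized
   as non-negative whatever the sign of X, because the exponent is an
   even integer-valued REAL_CST.  dconst2 and builtin_decl_explicit
   are real GCC entities; the wrapper is hypothetical.  */

static bool
example_pow_square_nonnegative (tree x)
{
  bool strict_ovf = false;
  tree two = build_real (double_type_node, dconst2);
  return tree_call_nonnegative_warnv_p (double_type_node,
                                        builtin_decl_explicit (BUILT_IN_POW),
                                        x, two, &strict_ovf);
}
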
15618 /* Return true if T is known to be non-negative. If the return
15619 value is based on the assumption that signed overflow is undefined,
15620 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15621 *STRICT_OVERFLOW_P. */
15622
15623 bool
15624 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15625 {
15626 enum tree_code code = TREE_CODE (t);
15627 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15628 return true;
15629
15630 switch (code)
15631 {
15632 case TARGET_EXPR:
15633 {
15634 tree temp = TARGET_EXPR_SLOT (t);
15635 t = TARGET_EXPR_INITIAL (t);
15636
15637 /* If the initializer is non-void, then it's a normal expression
15638 that will be assigned to the slot. */
15639 if (!VOID_TYPE_P (t))
15640 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15641
15642 /* Otherwise, the initializer sets the slot in some way. One common
15643 way is an assignment statement at the end of the initializer. */
15644 while (1)
15645 {
15646 if (TREE_CODE (t) == BIND_EXPR)
15647 t = expr_last (BIND_EXPR_BODY (t));
15648 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15649 || TREE_CODE (t) == TRY_CATCH_EXPR)
15650 t = expr_last (TREE_OPERAND (t, 0));
15651 else if (TREE_CODE (t) == STATEMENT_LIST)
15652 t = expr_last (t);
15653 else
15654 break;
15655 }
15656 if (TREE_CODE (t) == MODIFY_EXPR
15657 && TREE_OPERAND (t, 0) == temp)
15658 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15659 strict_overflow_p);
15660
15661 return false;
15662 }
15663
15664 case CALL_EXPR:
15665 {
15666 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15667 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15668
15669 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15670 get_callee_fndecl (t),
15671 arg0,
15672 arg1,
15673 strict_overflow_p);
15674 }
15675 case COMPOUND_EXPR:
15676 case MODIFY_EXPR:
15677 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15678 strict_overflow_p);
15679 case BIND_EXPR:
15680 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15681 strict_overflow_p);
15682 case SAVE_EXPR:
15683 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15684 strict_overflow_p);
15685
15686 default:
15687 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15688 TREE_TYPE (t));
15689 }
15690
15691 /* We don't know sign of `t', so be conservative and return false. */
15692 return false;
15693 }
15694
15695 /* Return true if T is known to be non-negative. If the return
15696 value is based on the assumption that signed overflow is undefined,
15697 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15698 *STRICT_OVERFLOW_P. */
15699
15700 bool
15701 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15702 {
15703 enum tree_code code;
15704 if (t == error_mark_node)
15705 return false;
15706
15707 code = TREE_CODE (t);
15708 switch (TREE_CODE_CLASS (code))
15709 {
15710 case tcc_binary:
15711 case tcc_comparison:
15712 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15713 TREE_TYPE (t),
15714 TREE_OPERAND (t, 0),
15715 TREE_OPERAND (t, 1),
15716 strict_overflow_p);
15717
15718 case tcc_unary:
15719 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15720 TREE_TYPE (t),
15721 TREE_OPERAND (t, 0),
15722 strict_overflow_p);
15723
15724 case tcc_constant:
15725 case tcc_declaration:
15726 case tcc_reference:
15727 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15728
15729 default:
15730 break;
15731 }
15732
15733 switch (code)
15734 {
15735 case TRUTH_AND_EXPR:
15736 case TRUTH_OR_EXPR:
15737 case TRUTH_XOR_EXPR:
15738 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15739 TREE_TYPE (t),
15740 TREE_OPERAND (t, 0),
15741 TREE_OPERAND (t, 1),
15742 strict_overflow_p);
15743 case TRUTH_NOT_EXPR:
15744 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15745 TREE_TYPE (t),
15746 TREE_OPERAND (t, 0),
15747 strict_overflow_p);
15748
15749 case COND_EXPR:
15750 case CONSTRUCTOR:
15751 case OBJ_TYPE_REF:
15752 case ASSERT_EXPR:
15753 case ADDR_EXPR:
15754 case WITH_SIZE_EXPR:
15755 case SSA_NAME:
15756 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15757
15758 default:
15759 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15760 }
15761 }
15762
15763 /* Return true if `t' is known to be non-negative. Handle warnings
15764 about undefined signed overflow. */
15765
15766 bool
15767 tree_expr_nonnegative_p (tree t)
15768 {
15769 bool ret, strict_overflow_p;
15770
15771 strict_overflow_p = false;
15772 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15773 if (strict_overflow_p)
15774 fold_overflow_warning (("assuming signed overflow does not occur when "
15775 "determining that expression is always "
15776 "non-negative"),
15777 WARN_STRICT_OVERFLOW_MISC);
15778 return ret;
15779 }
15780
15781
15782 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15783 For floating point we further ensure that T is not denormal.
15784 Similar logic is present in nonzero_address_p in rtlanal.c.
15785
15786 If the return value is based on the assumption that signed overflow
15787 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15788 change *STRICT_OVERFLOW_P. */
15789
15790 bool
15791 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15792 bool *strict_overflow_p)
15793 {
15794 switch (code)
15795 {
15796 case ABS_EXPR:
15797 return tree_expr_nonzero_warnv_p (op0,
15798 strict_overflow_p);
15799
15800 case NOP_EXPR:
15801 {
15802 tree inner_type = TREE_TYPE (op0);
15803 tree outer_type = type;
15804
15805 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15806 && tree_expr_nonzero_warnv_p (op0,
15807 strict_overflow_p));
15808 }
15809 break;
15810
15811 case NON_LVALUE_EXPR:
15812 return tree_expr_nonzero_warnv_p (op0,
15813 strict_overflow_p);
15814
15815 default:
15816 break;
15817 }
15818
15819 return false;
15820 }
15821
15822 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15823 For floating point we further ensure that T is not denormal.
15824 Similar logic is present in nonzero_address_p in rtlanal.c.
15825
15826 If the return value is based on the assumption that signed overflow
15827 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15828 change *STRICT_OVERFLOW_P. */
15829
15830 bool
15831 tree_binary_nonzero_warnv_p (enum tree_code code,
15832 tree type,
15833 tree op0,
15834 tree op1, bool *strict_overflow_p)
15835 {
15836 bool sub_strict_overflow_p;
15837 switch (code)
15838 {
15839 case POINTER_PLUS_EXPR:
15840 case PLUS_EXPR:
15841 if (TYPE_OVERFLOW_UNDEFINED (type))
15842 {
15843 /* With the presence of negative values it is hard
15844 to say something. */
15845 sub_strict_overflow_p = false;
15846 if (!tree_expr_nonnegative_warnv_p (op0,
15847 &sub_strict_overflow_p)
15848 || !tree_expr_nonnegative_warnv_p (op1,
15849 &sub_strict_overflow_p))
15850 return false;
15851 /* One of the operands must be positive and the other non-negative. */
15852 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15853 overflows, on a twos-complement machine the sum of two
15854 nonnegative numbers can never be zero. */
15855 return (tree_expr_nonzero_warnv_p (op0,
15856 strict_overflow_p)
15857 || tree_expr_nonzero_warnv_p (op1,
15858 strict_overflow_p));
15859 }
15860 break;
15861
15862 case MULT_EXPR:
15863 if (TYPE_OVERFLOW_UNDEFINED (type))
15864 {
15865 if (tree_expr_nonzero_warnv_p (op0,
15866 strict_overflow_p)
15867 && tree_expr_nonzero_warnv_p (op1,
15868 strict_overflow_p))
15869 {
15870 *strict_overflow_p = true;
15871 return true;
15872 }
15873 }
15874 break;
15875
15876 case MIN_EXPR:
15877 sub_strict_overflow_p = false;
15878 if (tree_expr_nonzero_warnv_p (op0,
15879 &sub_strict_overflow_p)
15880 && tree_expr_nonzero_warnv_p (op1,
15881 &sub_strict_overflow_p))
15882 {
15883 if (sub_strict_overflow_p)
15884 *strict_overflow_p = true;
/* MIN_EXPR selects one of its operands, so if both are nonzero
   the minimum is nonzero as well.  */
return true;
15885 }
15886 break;
15887
15888 case MAX_EXPR:
15889 sub_strict_overflow_p = false;
15890 if (tree_expr_nonzero_warnv_p (op0,
15891 &sub_strict_overflow_p))
15892 {
15893 if (sub_strict_overflow_p)
15894 *strict_overflow_p = true;
15895
15896 /* When both operands are nonzero, then MAX must be too. */
15897 if (tree_expr_nonzero_warnv_p (op1,
15898 strict_overflow_p))
15899 return true;
15900
15901 /* MAX where operand 0 is positive is positive. */
15902 return tree_expr_nonnegative_warnv_p (op0,
15903 strict_overflow_p);
15904 }
15905 /* MAX where operand 1 is positive is positive. */
15906 else if (tree_expr_nonzero_warnv_p (op1,
15907 &sub_strict_overflow_p)
15908 && tree_expr_nonnegative_warnv_p (op1,
15909 &sub_strict_overflow_p))
15910 {
15911 if (sub_strict_overflow_p)
15912 *strict_overflow_p = true;
15913 return true;
15914 }
15915 break;
15916
15917 case BIT_IOR_EXPR:
15918 return (tree_expr_nonzero_warnv_p (op1,
15919 strict_overflow_p)
15920 || tree_expr_nonzero_warnv_p (op0,
15921 strict_overflow_p));
15922
15923 default:
15924 break;
15925 }
15926
15927 return false;
15928 }
15929
15930 /* Return true when T is an address and is known to be nonzero.
15931 For floating point we further ensure that T is not denormal.
15932 Similar logic is present in nonzero_address_p in rtlanal.c.
15933
15934 If the return value is based on the assumption that signed overflow
15935 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15936 change *STRICT_OVERFLOW_P. */
15937
15938 bool
15939 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15940 {
15941 bool sub_strict_overflow_p;
15942 switch (TREE_CODE (t))
15943 {
15944 case INTEGER_CST:
15945 return !integer_zerop (t);
15946
15947 case ADDR_EXPR:
15948 {
15949 tree base = TREE_OPERAND (t, 0);
15950 if (!DECL_P (base))
15951 base = get_base_address (base);
15952
15953 if (!base)
15954 return false;
15955
15956 /* Weak declarations may link to NULL. Other things may also be NULL
15957 so protect with -fdelete-null-pointer-checks; but not variables
15958 allocated on the stack. */
15959 if (DECL_P (base)
15960 && (flag_delete_null_pointer_checks
15961 || (DECL_CONTEXT (base)
15962 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15963 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15964 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15965
15966 /* Constants are never weak. */
15967 if (CONSTANT_CLASS_P (base))
15968 return true;
15969
15970 return false;
15971 }
15972
15973 case COND_EXPR:
15974 sub_strict_overflow_p = false;
15975 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15976 &sub_strict_overflow_p)
15977 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15978 &sub_strict_overflow_p))
15979 {
15980 if (sub_strict_overflow_p)
15981 *strict_overflow_p = true;
15982 return true;
15983 }
15984 break;
15985
15986 default:
15987 break;
15988 }
15989 return false;
15990 }
15991
15992 /* Return true when T is an address and is known to be nonzero.
15993 For floating point we further ensure that T is not denormal.
15994 Similar logic is present in nonzero_address_p in rtlanal.c.
15995
15996 If the return value is based on the assumption that signed overflow
15997 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15998 change *STRICT_OVERFLOW_P. */
15999
16000 bool
16001 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16002 {
16003 tree type = TREE_TYPE (t);
16004 enum tree_code code;
16005
16006 /* Doing something useful for floating point would need more work. */
16007 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
16008 return false;
16009
16010 code = TREE_CODE (t);
16011 switch (TREE_CODE_CLASS (code))
16012 {
16013 case tcc_unary:
16014 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16015 strict_overflow_p);
16016 case tcc_binary:
16017 case tcc_comparison:
16018 return tree_binary_nonzero_warnv_p (code, type,
16019 TREE_OPERAND (t, 0),
16020 TREE_OPERAND (t, 1),
16021 strict_overflow_p);
16022 case tcc_constant:
16023 case tcc_declaration:
16024 case tcc_reference:
16025 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16026
16027 default:
16028 break;
16029 }
16030
16031 switch (code)
16032 {
16033 case TRUTH_NOT_EXPR:
16034 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16035 strict_overflow_p);
16036
16037 case TRUTH_AND_EXPR:
16038 case TRUTH_OR_EXPR:
16039 case TRUTH_XOR_EXPR:
16040 return tree_binary_nonzero_warnv_p (code, type,
16041 TREE_OPERAND (t, 0),
16042 TREE_OPERAND (t, 1),
16043 strict_overflow_p);
16044
16045 case COND_EXPR:
16046 case CONSTRUCTOR:
16047 case OBJ_TYPE_REF:
16048 case ASSERT_EXPR:
16049 case ADDR_EXPR:
16050 case WITH_SIZE_EXPR:
16051 case SSA_NAME:
16052 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16053
16054 case COMPOUND_EXPR:
16055 case MODIFY_EXPR:
16056 case BIND_EXPR:
16057 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16058 strict_overflow_p);
16059
16060 case SAVE_EXPR:
16061 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
16062 strict_overflow_p);
16063
16064 case CALL_EXPR:
16065 return alloca_call_p (t);
16066
16067 default:
16068 break;
16069 }
16070 return false;
16071 }
16072
16073 /* Return true when T is an address and is known to be nonzero.
16074 Handle warnings about undefined signed overflow. */
16075
16076 bool
16077 tree_expr_nonzero_p (tree t)
16078 {
16079 bool ret, strict_overflow_p;
16080
16081 strict_overflow_p = false;
16082 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16083 if (strict_overflow_p)
16084 fold_overflow_warning (("assuming signed overflow does not occur when "
16085 "determining that expression is always "
16086 "non-zero"),
16087 WARN_STRICT_OVERFLOW_MISC);
16088 return ret;
16089 }
16090
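/* Illustrative sketch, not part of GCC: a nonzero constant and the
   address of a (non-weak) declaration both test nonzero.
   build_fold_addr_expr is a real GCC entity; the wrapper and its
   DECL operand are hypothetical.  */

static bool
example_nonzero_queries (tree decl)
{
  gcc_assert (tree_expr_nonzero_p (build_int_cst (integer_type_node, 7)));

  /* The ADDR_EXPR lands in tree_single_nonzero_warnv_p, which checks
     DECL_WEAK and -fdelete-null-pointer-checks.  */
  return tree_expr_nonzero_p (build_fold_addr_expr (decl));
}
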
16091 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16092 attempt to fold the expression to a constant without modifying TYPE,
16093 OP0 or OP1.
16094
16095 If the expression could be simplified to a constant, then return
16096 the constant. If the expression would not be simplified to a
16097 constant, then return NULL_TREE. */
16098
16099 tree
16100 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16101 {
16102 tree tem = fold_binary (code, type, op0, op1);
16103 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16104 }
16105
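/* Illustrative sketch, not part of GCC: fold_binary_to_constant only
   answers when the simplified result is itself constant.  The
   wrapper and its VAR operand (a plain VAR_DECL) are hypothetical.  */

static void
example_fold_to_constant (tree var)
{
  tree four = build_int_cst (integer_type_node, 4);

  /* 4 + 4 folds to the constant 8 ...  */
  tree c = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                    four, four);
  gcc_assert (c && tree_to_shwi (c) == 8);

  /* ... but VAR + 4 stays non-constant, so NULL_TREE comes back.  */
  gcc_assert (fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                       var, four) == NULL_TREE);
}
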
16106 /* Given the components of a unary expression CODE, TYPE and OP0,
16107 attempt to fold the expression to a constant without modifying
16108 TYPE or OP0.
16109
16110 If the expression could be simplified to a constant, then return
16111 the constant. If the expression would not be simplified to a
16112 constant, then return NULL_TREE. */
16113
16114 tree
16115 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16116 {
16117 tree tem = fold_unary (code, type, op0);
16118 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16119 }
16120
16121 /* If EXP represents referencing an element in a constant string
16122 (either via pointer arithmetic or array indexing), return the
16123 tree representing the value accessed, otherwise return NULL. */
16124
16125 tree
16126 fold_read_from_constant_string (tree exp)
16127 {
16128 if ((TREE_CODE (exp) == INDIRECT_REF
16129 || TREE_CODE (exp) == ARRAY_REF)
16130 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16131 {
16132 tree exp1 = TREE_OPERAND (exp, 0);
16133 tree index;
16134 tree string;
16135 location_t loc = EXPR_LOCATION (exp);
16136
16137 if (TREE_CODE (exp) == INDIRECT_REF)
16138 string = string_constant (exp1, &index);
16139 else
16140 {
16141 tree low_bound = array_ref_low_bound (exp);
16142 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16143
16144 /* Optimize the special case of a zero lower bound.
16145
16146 We convert the low_bound to sizetype to avoid some problems
16147 with constant folding. (E.g. suppose the lower bound is 1,
16148 and its mode is QI. Without the conversion, (ARRAY
16149 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16150 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16151 if (! integer_zerop (low_bound))
16152 index = size_diffop_loc (loc, index,
16153 fold_convert_loc (loc, sizetype, low_bound));
16154
16155 string = exp1;
16156 }
16157
16158 if (string
16159 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16160 && TREE_CODE (string) == STRING_CST
16161 && TREE_CODE (index) == INTEGER_CST
16162 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16163 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16164 == MODE_INT)
16165 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16166 return build_int_cst_type (TREE_TYPE (exp),
16167 (TREE_STRING_POINTER (string)
16168 [tree_to_uhwi (index)]));
16169 }
16170 return NULL;
16171 }
16172
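/* Illustrative sketch, not part of GCC: folding a read from a string
   literal.  build4 and size_int are real GCC entities; the wrapper
   is hypothetical and STR is assumed to be a STRING_CST such as
   "abc".  */

static tree
example_read_string_elt (tree str, unsigned int i)
{
  tree elt = build4 (ARRAY_REF, char_type_node, str,
                     size_int (i), NULL_TREE, NULL_TREE);
  tree folded = fold_read_from_constant_string (elt);

  /* For "abc" and I == 1 this is the INTEGER_CST 'b'.  */
  return folded ? folded : elt;
}
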
16173 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16174 an integer constant, real, or fixed-point constant.
16175
16176 TYPE is the type of the result. */
16177
16178 static tree
16179 fold_negate_const (tree arg0, tree type)
16180 {
16181 tree t = NULL_TREE;
16182
16183 switch (TREE_CODE (arg0))
16184 {
16185 case INTEGER_CST:
16186 {
16187 wide_int val = arg0;
16188 bool overflow;
16189 val = wi::neg (val, &overflow);
16190 t = force_fit_type (type, val, 1,
16191 (overflow | TREE_OVERFLOW (arg0))
16192 && !TYPE_UNSIGNED (type));
16193 break;
16194 }
16195
16196 case REAL_CST:
16197 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16198 break;
16199
16200 case FIXED_CST:
16201 {
16202 FIXED_VALUE_TYPE f;
16203 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16204 &(TREE_FIXED_CST (arg0)), NULL,
16205 TYPE_SATURATING (type));
16206 t = build_fixed (type, f);
16207 /* Propagate overflow flags. */
16208 if (overflow_p | TREE_OVERFLOW (arg0))
16209 TREE_OVERFLOW (t) = 1;
16210 break;
16211 }
16212
16213 default:
16214 gcc_unreachable ();
16215 }
16216
16217 return t;
16218 }
16219
16220 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16221 an integer constant or real constant.
16222
16223 TYPE is the type of the result. */
16224
16225 tree
16226 fold_abs_const (tree arg0, tree type)
16227 {
16228 tree t = NULL_TREE;
16229
16230 switch (TREE_CODE (arg0))
16231 {
16232 case INTEGER_CST:
16233 {
16234 wide_int val = arg0;
16235
16236 /* If the value is unsigned or non-negative, then the absolute value
16237 is the same as the ordinary value. */
16238 if (!wi::neg_p (val, TYPE_SIGN (type)))
16239 t = arg0;
16240
16241 /* If the value is negative, then the absolute value is
16242 its negation. */
16243 else
16244 {
16245 bool overflow;
16246 val = wi::neg (val, &overflow);
16247 t = force_fit_type (type, val, -1,
16248 overflow | TREE_OVERFLOW (arg0));
16249 }
16250 }
16251 break;
16252
16253 case REAL_CST:
16254 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16255 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16256 else
16257 t = arg0;
16258 break;
16259
16260 default:
16261 gcc_unreachable ();
16262 }
16263
16264 return t;
16265 }
16266
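/* Illustrative sketch, not part of GCC: |-5| folds to 5; the one
   subtlety is |INT_MIN|, where force_fit_type marks the result with
   TREE_OVERFLOW instead of returning a silently wrong value.  The
   wrapper is hypothetical.  */

static void
example_fold_abs_const (void)
{
  tree m5 = build_int_cst (integer_type_node, -5);
  gcc_assert (tree_to_shwi (fold_abs_const (m5, integer_type_node)) == 5);
}
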
16267 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16268 constant. TYPE is the type of the result. */
16269
16270 static tree
16271 fold_not_const (const_tree arg0, tree type)
16272 {
16273 wide_int val;
16274
16275 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16276
16277 val = wi::bit_not (arg0);
16278 return force_fit_type (type, val, 0, TREE_OVERFLOW (arg0));
16279 }
16280
16281 /* Given CODE, a relational operator, the target type, TYPE and two
16282 constant operands OP0 and OP1, return the result of the
16283 relational operation. If the result is not a compile time
16284 constant, then return NULL_TREE. */
16285
16286 static tree
16287 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16288 {
16289 int result, invert;
16290
16291 /* From here on, the only cases we handle are when the result is
16292 known to be a constant. */
16293
16294 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16295 {
16296 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16297 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16298
16299 /* Handle the cases where either operand is a NaN. */
16300 if (real_isnan (c0) || real_isnan (c1))
16301 {
16302 switch (code)
16303 {
16304 case EQ_EXPR:
16305 case ORDERED_EXPR:
16306 result = 0;
16307 break;
16308
16309 case NE_EXPR:
16310 case UNORDERED_EXPR:
16311 case UNLT_EXPR:
16312 case UNLE_EXPR:
16313 case UNGT_EXPR:
16314 case UNGE_EXPR:
16315 case UNEQ_EXPR:
16316 result = 1;
16317 break;
16318
16319 case LT_EXPR:
16320 case LE_EXPR:
16321 case GT_EXPR:
16322 case GE_EXPR:
16323 case LTGT_EXPR:
16324 if (flag_trapping_math)
16325 return NULL_TREE;
16326 result = 0;
16327 break;
16328
16329 default:
16330 gcc_unreachable ();
16331 }
16332
16333 return constant_boolean_node (result, type);
16334 }
16335
16336 return constant_boolean_node (real_compare (code, c0, c1), type);
16337 }
16338
16339 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16340 {
16341 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16342 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16343 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16344 }
16345
16346 /* Handle equality/inequality of complex constants. */
16347 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16348 {
16349 tree rcond = fold_relational_const (code, type,
16350 TREE_REALPART (op0),
16351 TREE_REALPART (op1));
16352 tree icond = fold_relational_const (code, type,
16353 TREE_IMAGPART (op0),
16354 TREE_IMAGPART (op1));
16355 if (code == EQ_EXPR)
16356 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16357 else if (code == NE_EXPR)
16358 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16359 else
16360 return NULL_TREE;
16361 }
16362
16363 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16364 {
16365 unsigned count = VECTOR_CST_NELTS (op0);
16366 tree *elts = XALLOCAVEC (tree, count);
16367 gcc_assert (VECTOR_CST_NELTS (op1) == count
16368 && TYPE_VECTOR_SUBPARTS (type) == count);
16369
16370 for (unsigned i = 0; i < count; i++)
16371 {
16372 tree elem_type = TREE_TYPE (type);
16373 tree elem0 = VECTOR_CST_ELT (op0, i);
16374 tree elem1 = VECTOR_CST_ELT (op1, i);
16375
16376 tree tem = fold_relational_const (code, elem_type,
16377 elem0, elem1);
16378
16379 if (tem == NULL_TREE)
16380 return NULL_TREE;
16381
16382 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16383 }
16384
16385 return build_vector (type, elts);
16386 }
16387
16388 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16389
16390 To compute GT, swap the arguments and do LT.
16391 To compute GE, do LT and invert the result.
16392 To compute LE, swap the arguments, do LT and invert the result.
16393 To compute NE, do EQ and invert the result.
16394
16395 Therefore, the code below must handle only EQ and LT. */
16396
16397 if (code == LE_EXPR || code == GT_EXPR)
16398 {
16399 tree tem = op0;
16400 op0 = op1;
16401 op1 = tem;
16402 code = swap_tree_comparison (code);
16403 }
16404
16405 /* Note that it is safe to invert for real values here because we
16406 have already handled the one case where it matters. */
16407
16408 invert = 0;
16409 if (code == NE_EXPR || code == GE_EXPR)
16410 {
16411 invert = 1;
16412 code = invert_tree_comparison (code, false);
16413 }
16414
16415 /* Compute a result for LT or EQ if args permit;
16416 Otherwise return NULL_TREE. */
16417 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16418 {
16419 if (code == EQ_EXPR)
16420 result = tree_int_cst_equal (op0, op1);
16421 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16422 result = INT_CST_LT_UNSIGNED (op0, op1);
16423 else
16424 result = INT_CST_LT (op0, op1);
16425 }
16426 else
16427 return NULL_TREE;
16428
16429 if (invert)
16430 result ^= 1;
16431 return constant_boolean_node (result, type);
16432 }
16433
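/* Illustrative sketch, not part of GCC: comparing two REAL_CSTs,
   including the NaN corner handled above.  boolean_type_node is a
   real GCC entity; the wrapper is hypothetical.  */

static tree
example_fold_real_less_than (tree c0, tree c1)
{
  /* Ordinarily a boolean constant; if either operand is a NaN and
     flag_trapping_math is set, LT_EXPR yields NULL_TREE because the
     comparison could raise an exception at run time.  */
  return fold_relational_const (LT_EXPR, boolean_type_node, c0, c1);
}
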
16434 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16435 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16436 itself. */
16437
16438 tree
16439 fold_build_cleanup_point_expr (tree type, tree expr)
16440 {
16441 /* If the expression does not have side effects then we don't have to wrap
16442 it with a cleanup point expression. */
16443 if (!TREE_SIDE_EFFECTS (expr))
16444 return expr;
16445
16446 /* If the expression is a return, check whether the expression inside the
16447 return has no side effects, or whether the right-hand side of the modify
16448 expression inside the return does not. If either has none, we don't need to
16449 wrap the expression in a cleanup point expression. Note we don't check the
16450 left-hand side of the modify because it should always be a return decl. */
16451 if (TREE_CODE (expr) == RETURN_EXPR)
16452 {
16453 tree op = TREE_OPERAND (expr, 0);
16454 if (!op || !TREE_SIDE_EFFECTS (op))
16455 return expr;
16456 op = TREE_OPERAND (op, 1);
16457 if (!TREE_SIDE_EFFECTS (op))
16458 return expr;
16459 }
16460
16461 return build1 (CLEANUP_POINT_EXPR, type, expr);
16462 }
16463
16464 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16465 of an indirection through OP0, or NULL_TREE if no simplification is
16466 possible. */
16467
16468 tree
16469 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16470 {
16471 tree sub = op0;
16472 tree subtype;
16473
16474 STRIP_NOPS (sub);
16475 subtype = TREE_TYPE (sub);
16476 if (!POINTER_TYPE_P (subtype))
16477 return NULL_TREE;
16478
16479 if (TREE_CODE (sub) == ADDR_EXPR)
16480 {
16481 tree op = TREE_OPERAND (sub, 0);
16482 tree optype = TREE_TYPE (op);
16483 /* *&CONST_DECL -> to the value of the const decl. */
16484 if (TREE_CODE (op) == CONST_DECL)
16485 return DECL_INITIAL (op);
16486 /* *&p => p; make sure to handle *&"str"[cst] here. */
16487 if (type == optype)
16488 {
16489 tree fop = fold_read_from_constant_string (op);
16490 if (fop)
16491 return fop;
16492 else
16493 return op;
16494 }
16495 /* *(foo *)&fooarray => fooarray[0] */
16496 else if (TREE_CODE (optype) == ARRAY_TYPE
16497 && type == TREE_TYPE (optype)
16498 && (!in_gimple_form
16499 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16500 {
16501 tree type_domain = TYPE_DOMAIN (optype);
16502 tree min_val = size_zero_node;
16503 if (type_domain && TYPE_MIN_VALUE (type_domain))
16504 min_val = TYPE_MIN_VALUE (type_domain);
16505 if (in_gimple_form
16506 && TREE_CODE (min_val) != INTEGER_CST)
16507 return NULL_TREE;
16508 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16509 NULL_TREE, NULL_TREE);
16510 }
16511 /* *(foo *)&complexfoo => __real__ complexfoo */
16512 else if (TREE_CODE (optype) == COMPLEX_TYPE
16513 && type == TREE_TYPE (optype))
16514 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16515 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16516 else if (TREE_CODE (optype) == VECTOR_TYPE
16517 && type == TREE_TYPE (optype))
16518 {
16519 tree part_width = TYPE_SIZE (type);
16520 tree index = bitsize_int (0);
16521 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16522 }
16523 }
16524
16525 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16526 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16527 {
16528 tree op00 = TREE_OPERAND (sub, 0);
16529 tree op01 = TREE_OPERAND (sub, 1);
16530
16531 STRIP_NOPS (op00);
16532 if (TREE_CODE (op00) == ADDR_EXPR)
16533 {
16534 tree op00type;
16535 op00 = TREE_OPERAND (op00, 0);
16536 op00type = TREE_TYPE (op00);
16537
16538 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16539 if (TREE_CODE (op00type) == VECTOR_TYPE
16540 && type == TREE_TYPE (op00type))
16541 {
16542 HOST_WIDE_INT offset = tree_to_shwi (op01);
16543 tree part_width = TYPE_SIZE (type);
16544 unsigned HOST_WIDE_INT part_widthi
16545 = tree_to_shwi (part_width) / BITS_PER_UNIT;
16546 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16547 tree index = bitsize_int (indexi);
16548
16549 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16550 return fold_build3_loc (loc,
16551 BIT_FIELD_REF, type, op00,
16552 part_width, index);
16553
16554 }
16555 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16556 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16557 && type == TREE_TYPE (op00type))
16558 {
16559 tree size = TYPE_SIZE_UNIT (type);
16560 if (tree_int_cst_equal (size, op01))
16561 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16562 }
16563 /* ((foo *)&fooarray)[1] => fooarray[1] */
16564 else if (TREE_CODE (op00type) == ARRAY_TYPE
16565 && type == TREE_TYPE (op00type))
16566 {
16567 tree type_domain = TYPE_DOMAIN (op00type);
16568 tree min_val = size_zero_node;
16569 if (type_domain && TYPE_MIN_VALUE (type_domain))
16570 min_val = TYPE_MIN_VALUE (type_domain);
16571 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16572 TYPE_SIZE_UNIT (type));
16573 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16574 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16575 NULL_TREE, NULL_TREE);
16576 }
16577 }
16578 }
16579
16580 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16581 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16582 && type == TREE_TYPE (TREE_TYPE (subtype))
16583 && (!in_gimple_form
16584 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16585 {
16586 tree type_domain;
16587 tree min_val = size_zero_node;
16588 sub = build_fold_indirect_ref_loc (loc, sub);
16589 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16590 if (type_domain && TYPE_MIN_VALUE (type_domain))
16591 min_val = TYPE_MIN_VALUE (type_domain);
16592 if (in_gimple_form
16593 && TREE_CODE (min_val) != INTEGER_CST)
16594 return NULL_TREE;
16595 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16596 NULL_TREE);
16597 }
16598
16599 return NULL_TREE;
16600 }
16601
16602 /* Builds an expression for an indirection through T, simplifying some
16603 cases. */
16604
16605 tree
16606 build_fold_indirect_ref_loc (location_t loc, tree t)
16607 {
16608 tree type = TREE_TYPE (TREE_TYPE (t));
16609 tree sub = fold_indirect_ref_1 (loc, type, t);
16610
16611 if (sub)
16612 return sub;
16613
16614 return build1_loc (loc, INDIRECT_REF, type, t);
16615 }
16616
16617 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16618
16619 tree
16620 fold_indirect_ref_loc (location_t loc, tree t)
16621 {
16622 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16623
16624 if (sub)
16625 return sub;
16626 else
16627 return t;
16628 }
16629
16630 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16631 whose result is ignored. The type of the returned tree need not be
16632 the same as that of the original expression. */
16633
16634 tree
16635 fold_ignored_result (tree t)
16636 {
16637 if (!TREE_SIDE_EFFECTS (t))
16638 return integer_zero_node;
16639
16640 for (;;)
16641 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16642 {
16643 case tcc_unary:
16644 t = TREE_OPERAND (t, 0);
16645 break;
16646
16647 case tcc_binary:
16648 case tcc_comparison:
16649 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16650 t = TREE_OPERAND (t, 0);
16651 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16652 t = TREE_OPERAND (t, 1);
16653 else
16654 return t;
16655 break;
16656
16657 case tcc_expression:
16658 switch (TREE_CODE (t))
16659 {
16660 case COMPOUND_EXPR:
16661 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16662 return t;
16663 t = TREE_OPERAND (t, 0);
16664 break;
16665
16666 case COND_EXPR:
16667 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16668 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16669 return t;
16670 t = TREE_OPERAND (t, 0);
16671 break;
16672
16673 default:
16674 return t;
16675 }
16676 break;
16677
16678 default:
16679 return t;
16680 }
16681 }
16682
16683 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16684
16685 tree
16686 round_up_loc (location_t loc, tree value, int divisor)
16687 {
16688 tree div = NULL_TREE;
16689
16690 gcc_assert (divisor > 0);
16691 if (divisor == 1)
16692 return value;
16693
16694 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16695 have to do anything. Only do this when we are not given a const,
16696 because for a constant this check is more expensive than simply
16697 doing the rounding. */
16698 if (TREE_CODE (value) != INTEGER_CST)
16699 {
16700 div = build_int_cst (TREE_TYPE (value), divisor);
16701
16702 if (multiple_of_p (TREE_TYPE (value), value, div))
16703 return value;
16704 }
16705
16706 /* If divisor is a power of two, simplify this to bit manipulation. */
16707 if (divisor == (divisor & -divisor))
16708 {
16709 if (TREE_CODE (value) == INTEGER_CST)
16710 {
16711 wide_int val = value;
16712 bool overflow_p;
16713
16714 if ((val & (divisor - 1)) == 0)
16715 return value;
16716
16717 overflow_p = TREE_OVERFLOW (value);
16718 val &= ~(divisor - 1);
16719 val += divisor;
16720 if (val == 0)
16721 overflow_p = true;
16722
16723 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16724 }
16725 else
16726 {
16727 tree t;
16728
16729 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16730 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16731 t = build_int_cst (TREE_TYPE (value), -divisor);
16732 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16733 }
16734 }
16735 else
16736 {
16737 if (!div)
16738 div = build_int_cst (TREE_TYPE (value), divisor);
16739 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16740 value = size_binop_loc (loc, MULT_EXPR, value, div);
16741 }
16742
16743 return value;
16744 }
16745
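/* Illustrative sketch, not part of GCC: for a power-of-two divisor
   the constant path above computes (VALUE + DIVISOR-1) & -DIVISOR.
   The wrapper is hypothetical.  */

static void
example_round_up (void)
{
  /* (37 + 7) & -8 == 40.  */
  tree v = round_up_loc (UNKNOWN_LOCATION, size_int (37), 8);
  gcc_assert (tree_to_shwi (v) == 40);
}
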
16746 /* Likewise, but round down. */
16747
16748 tree
16749 round_down_loc (location_t loc, tree value, int divisor)
16750 {
16751 tree div = NULL_TREE;
16752
16753 gcc_assert (divisor > 0);
16754 if (divisor == 1)
16755 return value;
16756
16757 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16758 have to do anything. Only do this when we are not given a const,
16759 because for a constant this check is more expensive than simply
16760 doing the rounding. */
16761 if (TREE_CODE (value) != INTEGER_CST)
16762 {
16763 div = build_int_cst (TREE_TYPE (value), divisor);
16764
16765 if (multiple_of_p (TREE_TYPE (value), value, div))
16766 return value;
16767 }
16768
16769 /* If divisor is a power of two, simplify this to bit manipulation. */
16770 if (divisor == (divisor & -divisor))
16771 {
16772 tree t;
16773
16774 t = build_int_cst (TREE_TYPE (value), -divisor);
16775 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16776 }
16777 else
16778 {
16779 if (!div)
16780 div = build_int_cst (TREE_TYPE (value), divisor);
16781 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16782 value = size_binop_loc (loc, MULT_EXPR, value, div);
16783 }
16784
16785 return value;
16786 }
16787
16788 /* Returns the pointer to the base of the object addressed by EXP and
16789 extracts the information about the offset of the access, storing it
16790 in PBITPOS and POFFSET. */
16791
16792 static tree
16793 split_address_to_core_and_offset (tree exp,
16794 HOST_WIDE_INT *pbitpos, tree *poffset)
16795 {
16796 tree core;
16797 enum machine_mode mode;
16798 int unsignedp, volatilep;
16799 HOST_WIDE_INT bitsize;
16800 location_t loc = EXPR_LOCATION (exp);
16801
16802 if (TREE_CODE (exp) == ADDR_EXPR)
16803 {
16804 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16805 poffset, &mode, &unsignedp, &volatilep,
16806 false);
16807 core = build_fold_addr_expr_loc (loc, core);
16808 }
16809 else
16810 {
16811 core = exp;
16812 *pbitpos = 0;
16813 *poffset = NULL_TREE;
16814 }
16815
16816 return core;
16817 }
16818
16819 /* Returns true if addresses of E1 and E2 differ by a constant, false
16820 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16821
16822 bool
16823 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16824 {
16825 tree core1, core2;
16826 HOST_WIDE_INT bitpos1, bitpos2;
16827 tree toffset1, toffset2, tdiff, type;
16828
16829 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16830 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16831
16832 if (bitpos1 % BITS_PER_UNIT != 0
16833 || bitpos2 % BITS_PER_UNIT != 0
16834 || !operand_equal_p (core1, core2, 0))
16835 return false;
16836
16837 if (toffset1 && toffset2)
16838 {
16839 type = TREE_TYPE (toffset1);
16840 if (type != TREE_TYPE (toffset2))
16841 toffset2 = fold_convert (type, toffset2);
16842
16843 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16844 if (!cst_fits_shwi_p (tdiff))
16845 return false;
16846
16847 *diff = int_cst_value (tdiff);
16848 }
16849 else if (toffset1 || toffset2)
16850 {
16851 /* If only one of the offsets is non-constant, the difference cannot
16852 be a constant. */
16853 return false;
16854 }
16855 else
16856 *diff = 0;
16857
16858 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16859 return true;
16860 }
16861
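/* Illustrative sketch, not part of GCC: a typical client asks
   whether two addresses differ by a fixed byte count, e.g. &a[3] and
   &a[2] in an int array differ by sizeof (int).  The wrapper is
   hypothetical.  */

static bool
example_addresses_4_bytes_apart (tree addr1, tree addr2)
{
  HOST_WIDE_INT diff;

  /* True when ADDR1 and ADDR2 share a base and ADDR1 - ADDR2 is the
     compile-time constant 4.  */
  return ptr_difference_const (addr1, addr2, &diff) && diff == 4;
}
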
16862 /* Simplify the floating point expression EXP when the sign of the
16863 result is not significant. Return NULL_TREE if no simplification
16864 is possible. */
16865
16866 tree
16867 fold_strip_sign_ops (tree exp)
16868 {
16869 tree arg0, arg1;
16870 location_t loc = EXPR_LOCATION (exp);
16871
16872 switch (TREE_CODE (exp))
16873 {
16874 case ABS_EXPR:
16875 case NEGATE_EXPR:
16876 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16877 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16878
16879 case MULT_EXPR:
16880 case RDIV_EXPR:
16881 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16882 return NULL_TREE;
16883 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16884 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16885 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16886 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16887 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16888 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16889 break;
16890
16891 case COMPOUND_EXPR:
16892 arg0 = TREE_OPERAND (exp, 0);
16893 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16894 if (arg1)
16895 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16896 break;
16897
16898 case COND_EXPR:
16899 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16900 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16901 if (arg0 || arg1)
16902 return fold_build3_loc (loc,
16903 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16904 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16905 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16906 break;
16907
16908 case CALL_EXPR:
16909 {
16910 const enum built_in_function fcode = builtin_mathfn_code (exp);
16911 switch (fcode)
16912 {
16913 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16914 /* Strip copysign function call, return the 1st argument. */
16915 arg0 = CALL_EXPR_ARG (exp, 0);
16916 arg1 = CALL_EXPR_ARG (exp, 1);
16917 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16918
16919 default:
16920 /* Strip sign ops from the argument of "odd" math functions. */
16921 if (negate_mathfn_p (fcode))
16922 {
16923 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16924 if (arg0)
16925 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16926 }
16927 break;
16928 }
16929 }
16930 break;
16931
16932 default:
16933 break;
16934 }
16935 return NULL_TREE;
16936 }
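
/* Illustrative sketch, not part of GCC: when only the magnitude of a
   value matters, e.g. the argument of cos, sign operations can be
   discarded with fold_strip_sign_ops.  The wrapper is
   hypothetical.  */

static tree
example_strip_signs (tree arg)
{
  /* For -X this returns X; for copysign (X, Y) it returns X (while
     keeping Y for its side effects); NULL_TREE means nothing could
     be stripped.  */
  tree stripped = fold_strip_sign_ops (arg);
  return stripped ? stripped : arg;
}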