]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/fold-const.c
dojump.h: New header file.
[thirdparty/gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "hash-set.h"
49 #include "machmode.h"
50 #include "vec.h"
51 #include "double-int.h"
52 #include "input.h"
53 #include "alias.h"
54 #include "symtab.h"
55 #include "wide-int.h"
56 #include "inchash.h"
57 #include "tree.h"
58 #include "fold-const.h"
59 #include "stor-layout.h"
60 #include "calls.h"
61 #include "tree-iterator.h"
62 #include "realmpfr.h"
63 #include "rtl.h"
64 #include "hashtab.h"
65 #include "hard-reg-set.h"
66 #include "function.h"
67 #include "statistics.h"
68 #include "real.h"
69 #include "fixed-value.h"
70 #include "insn-config.h"
71 #include "expmed.h"
72 #include "dojump.h"
73 #include "explow.h"
74 #include "emit-rtl.h"
75 #include "varasm.h"
76 #include "stmt.h"
77 #include "expr.h"
78 #include "tm_p.h"
79 #include "target.h"
80 #include "diagnostic-core.h"
81 #include "intl.h"
82 #include "langhooks.h"
83 #include "md5.h"
84 #include "predict.h"
85 #include "basic-block.h"
86 #include "tree-ssa-alias.h"
87 #include "internal-fn.h"
88 #include "tree-eh.h"
89 #include "gimple-expr.h"
90 #include "is-a.h"
91 #include "gimple.h"
92 #include "gimplify.h"
93 #include "tree-dfa.h"
94 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
95 #include "builtins.h"
96 #include "hash-map.h"
97 #include "plugin-api.h"
98 #include "ipa-ref.h"
99 #include "cgraph.h"
100 #include "generic-match.h"
101 #include "optabs.h"
102
103 /* Nonzero if we are folding constants inside an initializer; zero
104 otherwise. */
105 int folding_initializer = 0;
106
107 /* The following constants represent a bit based encoding of GCC's
108 comparison operators. This encoding simplifies transformations
109 on relational comparison operators, such as AND and OR. */
110 enum comparison_code {
111 COMPCODE_FALSE = 0,
112 COMPCODE_LT = 1,
113 COMPCODE_EQ = 2,
114 COMPCODE_LE = 3,
115 COMPCODE_GT = 4,
116 COMPCODE_LTGT = 5,
117 COMPCODE_GE = 6,
118 COMPCODE_ORD = 7,
119 COMPCODE_UNORD = 8,
120 COMPCODE_UNLT = 9,
121 COMPCODE_UNEQ = 10,
122 COMPCODE_UNLE = 11,
123 COMPCODE_UNGT = 12,
124 COMPCODE_NE = 13,
125 COMPCODE_UNGE = 14,
126 COMPCODE_TRUE = 15
127 };
128
129 static bool negate_mathfn_p (enum built_in_function);
130 static bool negate_expr_p (tree);
131 static tree negate_expr (tree);
132 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
133 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
134 static enum comparison_code comparison_to_compcode (enum tree_code);
135 static enum tree_code compcode_to_comparison (enum comparison_code);
136 static int operand_equal_for_comparison_p (tree, tree, tree);
137 static int twoval_comparison_p (tree, tree *, tree *, int *);
138 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
139 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
140 static tree make_bit_field_ref (location_t, tree, tree,
141 HOST_WIDE_INT, HOST_WIDE_INT, int);
142 static tree optimize_bit_field_compare (location_t, enum tree_code,
143 tree, tree, tree);
144 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
145 HOST_WIDE_INT *,
146 machine_mode *, int *, int *,
147 tree *, tree *);
148 static int simple_operand_p (const_tree);
149 static bool simple_operand_p_2 (tree);
150 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
151 static tree range_predecessor (tree);
152 static tree range_successor (tree);
153 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
154 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
155 static tree unextend (tree, int, int, tree);
156 static tree optimize_minmax_comparison (location_t, enum tree_code,
157 tree, tree, tree);
158 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
159 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
160 static tree fold_binary_op_with_conditional_arg (location_t,
161 enum tree_code, tree,
162 tree, tree,
163 tree, tree, int);
164 static tree fold_mathfn_compare (location_t,
165 enum built_in_function, enum tree_code,
166 tree, tree, tree);
167 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
168 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
169 static bool reorder_operands_p (const_tree, const_tree);
170 static tree fold_negate_const (tree, tree);
171 static tree fold_not_const (const_tree, tree);
172 static tree fold_relational_const (enum tree_code, tree, tree, tree);
173 static tree fold_convert_const (enum tree_code, tree, tree);
174 static tree fold_view_convert_expr (tree, tree);
175 static bool vec_cst_ctor_to_array (tree, tree *);
176
177
178 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
179 Otherwise, return LOC. */
180
181 static location_t
182 expr_location_or (tree t, location_t loc)
183 {
184 location_t tloc = EXPR_LOCATION (t);
185 return tloc == UNKNOWN_LOCATION ? loc : tloc;
186 }
187
188 /* Similar to protected_set_expr_location, but never modify x in place,
189 if location can and needs to be set, unshare it. */
190
191 static inline tree
192 protected_set_expr_location_unshare (tree x, location_t loc)
193 {
194 if (CAN_HAVE_LOCATION_P (x)
195 && EXPR_LOCATION (x) != loc
196 && !(TREE_CODE (x) == SAVE_EXPR
197 || TREE_CODE (x) == TARGET_EXPR
198 || TREE_CODE (x) == BIND_EXPR))
199 {
200 x = copy_node (x);
201 SET_EXPR_LOCATION (x, loc);
202 }
203 return x;
204 }
205 \f
206 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
207 division and returns the quotient. Otherwise returns
208 NULL_TREE. */
209
210 tree
211 div_if_zero_remainder (const_tree arg1, const_tree arg2)
212 {
213 widest_int quo;
214
215 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
216 SIGNED, &quo))
217 return wide_int_to_tree (TREE_TYPE (arg1), quo);
218
219 return NULL_TREE;
220 }
221 \f
222 /* This is nonzero if we should defer warnings about undefined
223 overflow. This facility exists because these warnings are a
224 special case. The code to estimate loop iterations does not want
225 to issue any warnings, since it works with expressions which do not
226 occur in user code. Various bits of cleanup code call fold(), but
227 only use the result if it has certain characteristics (e.g., is a
228 constant); that code only wants to issue a warning if the result is
229 used. */
230
231 static int fold_deferring_overflow_warnings;
232
233 /* If a warning about undefined overflow is deferred, this is the
234 warning. Note that this may cause us to turn two warnings into
235 one, but that is fine since it is sufficient to only give one
236 warning per expression. */
237
238 static const char* fold_deferred_overflow_warning;
239
240 /* If a warning about undefined overflow is deferred, this is the
241 level at which the warning should be emitted. */
242
243 static enum warn_strict_overflow_code fold_deferred_overflow_code;
244
245 /* Start deferring overflow warnings. We could use a stack here to
246 permit nested calls, but at present it is not necessary. */
247
248 void
249 fold_defer_overflow_warnings (void)
250 {
251 ++fold_deferring_overflow_warnings;
252 }
253
254 /* Stop deferring overflow warnings. If there is a pending warning,
255 and ISSUE is true, then issue the warning if appropriate. STMT is
256 the statement with which the warning should be associated (used for
257 location information); STMT may be NULL. CODE is the level of the
258 warning--a warn_strict_overflow_code value. This function will use
259 the smaller of CODE and the deferred code when deciding whether to
260 issue the warning. CODE may be zero to mean to always use the
261 deferred code. */
262
263 void
264 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
265 {
266 const char *warnmsg;
267 location_t locus;
268
269 gcc_assert (fold_deferring_overflow_warnings > 0);
270 --fold_deferring_overflow_warnings;
271 if (fold_deferring_overflow_warnings > 0)
272 {
273 if (fold_deferred_overflow_warning != NULL
274 && code != 0
275 && code < (int) fold_deferred_overflow_code)
276 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
277 return;
278 }
279
280 warnmsg = fold_deferred_overflow_warning;
281 fold_deferred_overflow_warning = NULL;
282
283 if (!issue || warnmsg == NULL)
284 return;
285
286 if (gimple_no_warning_p (stmt))
287 return;
288
289 /* Use the smallest code level when deciding to issue the
290 warning. */
291 if (code == 0 || code > (int) fold_deferred_overflow_code)
292 code = fold_deferred_overflow_code;
293
294 if (!issue_strict_overflow_warning (code))
295 return;
296
297 if (stmt == NULL)
298 locus = input_location;
299 else
300 locus = gimple_location (stmt);
301 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
302 }
303
304 /* Stop deferring overflow warnings, ignoring any deferred
305 warnings. */
306
307 void
308 fold_undefer_and_ignore_overflow_warnings (void)
309 {
310 fold_undefer_overflow_warnings (false, NULL, 0);
311 }
312
313 /* Whether we are deferring overflow warnings. */
314
315 bool
316 fold_deferring_overflow_warnings_p (void)
317 {
318 return fold_deferring_overflow_warnings > 0;
319 }
320
321 /* This is called when we fold something based on the fact that signed
322 overflow is undefined. */
323
324 static void
325 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
326 {
327 if (fold_deferring_overflow_warnings > 0)
328 {
329 if (fold_deferred_overflow_warning == NULL
330 || wc < fold_deferred_overflow_code)
331 {
332 fold_deferred_overflow_warning = gmsgid;
333 fold_deferred_overflow_code = wc;
334 }
335 }
336 else if (issue_strict_overflow_warning (wc))
337 warning (OPT_Wstrict_overflow, gmsgid);
338 }
339 \f
340 /* Return true if the built-in mathematical function specified by CODE
341 is odd, i.e. -f(x) == f(-x). */
342
343 static bool
344 negate_mathfn_p (enum built_in_function code)
345 {
346 switch (code)
347 {
348 CASE_FLT_FN (BUILT_IN_ASIN):
349 CASE_FLT_FN (BUILT_IN_ASINH):
350 CASE_FLT_FN (BUILT_IN_ATAN):
351 CASE_FLT_FN (BUILT_IN_ATANH):
352 CASE_FLT_FN (BUILT_IN_CASIN):
353 CASE_FLT_FN (BUILT_IN_CASINH):
354 CASE_FLT_FN (BUILT_IN_CATAN):
355 CASE_FLT_FN (BUILT_IN_CATANH):
356 CASE_FLT_FN (BUILT_IN_CBRT):
357 CASE_FLT_FN (BUILT_IN_CPROJ):
358 CASE_FLT_FN (BUILT_IN_CSIN):
359 CASE_FLT_FN (BUILT_IN_CSINH):
360 CASE_FLT_FN (BUILT_IN_CTAN):
361 CASE_FLT_FN (BUILT_IN_CTANH):
362 CASE_FLT_FN (BUILT_IN_ERF):
363 CASE_FLT_FN (BUILT_IN_LLROUND):
364 CASE_FLT_FN (BUILT_IN_LROUND):
365 CASE_FLT_FN (BUILT_IN_ROUND):
366 CASE_FLT_FN (BUILT_IN_SIN):
367 CASE_FLT_FN (BUILT_IN_SINH):
368 CASE_FLT_FN (BUILT_IN_TAN):
369 CASE_FLT_FN (BUILT_IN_TANH):
370 CASE_FLT_FN (BUILT_IN_TRUNC):
371 return true;
372
373 CASE_FLT_FN (BUILT_IN_LLRINT):
374 CASE_FLT_FN (BUILT_IN_LRINT):
375 CASE_FLT_FN (BUILT_IN_NEARBYINT):
376 CASE_FLT_FN (BUILT_IN_RINT):
377 return !flag_rounding_math;
378
379 default:
380 break;
381 }
382 return false;
383 }
384
385 /* Check whether we may negate an integer constant T without causing
386 overflow. */
387
388 bool
389 may_negate_without_overflow_p (const_tree t)
390 {
391 tree type;
392
393 gcc_assert (TREE_CODE (t) == INTEGER_CST);
394
395 type = TREE_TYPE (t);
396 if (TYPE_UNSIGNED (type))
397 return false;
398
399 return !wi::only_sign_bit_p (t);
400 }
401
402 /* Determine whether an expression T can be cheaply negated using
403 the function negate_expr without introducing undefined overflow. */
404
405 static bool
406 negate_expr_p (tree t)
407 {
408 tree type;
409
410 if (t == 0)
411 return false;
412
413 type = TREE_TYPE (t);
414
415 STRIP_SIGN_NOPS (t);
416 switch (TREE_CODE (t))
417 {
418 case INTEGER_CST:
419 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
420 return true;
421
422 /* Check that -CST will not overflow type. */
423 return may_negate_without_overflow_p (t);
424 case BIT_NOT_EXPR:
425 return (INTEGRAL_TYPE_P (type)
426 && TYPE_OVERFLOW_WRAPS (type));
427
428 case FIXED_CST:
429 return true;
430
431 case NEGATE_EXPR:
432 return !TYPE_OVERFLOW_SANITIZED (type);
433
434 case REAL_CST:
435 /* We want to canonicalize to positive real constants. Pretend
436 that only negative ones can be easily negated. */
437 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
438
439 case COMPLEX_CST:
440 return negate_expr_p (TREE_REALPART (t))
441 && negate_expr_p (TREE_IMAGPART (t));
442
443 case VECTOR_CST:
444 {
445 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
446 return true;
447
448 int count = TYPE_VECTOR_SUBPARTS (type), i;
449
450 for (i = 0; i < count; i++)
451 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
452 return false;
453
454 return true;
455 }
456
457 case COMPLEX_EXPR:
458 return negate_expr_p (TREE_OPERAND (t, 0))
459 && negate_expr_p (TREE_OPERAND (t, 1));
460
461 case CONJ_EXPR:
462 return negate_expr_p (TREE_OPERAND (t, 0));
463
464 case PLUS_EXPR:
465 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
466 || HONOR_SIGNED_ZEROS (element_mode (type)))
467 return false;
468 /* -(A + B) -> (-B) - A. */
469 if (negate_expr_p (TREE_OPERAND (t, 1))
470 && reorder_operands_p (TREE_OPERAND (t, 0),
471 TREE_OPERAND (t, 1)))
472 return true;
473 /* -(A + B) -> (-A) - B. */
474 return negate_expr_p (TREE_OPERAND (t, 0));
475
476 case MINUS_EXPR:
477 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
478 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
479 && !HONOR_SIGNED_ZEROS (element_mode (type))
480 && reorder_operands_p (TREE_OPERAND (t, 0),
481 TREE_OPERAND (t, 1));
482
483 case MULT_EXPR:
484 if (TYPE_UNSIGNED (TREE_TYPE (t)))
485 break;
486
487 /* Fall through. */
488
489 case RDIV_EXPR:
490 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
491 return negate_expr_p (TREE_OPERAND (t, 1))
492 || negate_expr_p (TREE_OPERAND (t, 0));
493 break;
494
495 case TRUNC_DIV_EXPR:
496 case ROUND_DIV_EXPR:
497 case EXACT_DIV_EXPR:
498 /* In general we can't negate A / B, because if A is INT_MIN and
499 B is 1, we may turn this into INT_MIN / -1 which is undefined
500 and actually traps on some architectures. But if overflow is
501 undefined, we can negate, because - (INT_MIN / 1) is an
502 overflow. */
503 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
504 {
505 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
506 break;
507 /* If overflow is undefined then we have to be careful because
508 we ask whether it's ok to associate the negate with the
509 division which is not ok for example for
510 -((a - b) / c) where (-(a - b)) / c may invoke undefined
511 overflow because of negating INT_MIN. So do not use
512 negate_expr_p here but open-code the two important cases. */
513 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
514 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
515 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
516 return true;
517 }
518 else if (negate_expr_p (TREE_OPERAND (t, 0)))
519 return true;
520 return negate_expr_p (TREE_OPERAND (t, 1));
521
522 case NOP_EXPR:
523 /* Negate -((double)float) as (double)(-float). */
524 if (TREE_CODE (type) == REAL_TYPE)
525 {
526 tree tem = strip_float_extensions (t);
527 if (tem != t)
528 return negate_expr_p (tem);
529 }
530 break;
531
532 case CALL_EXPR:
533 /* Negate -f(x) as f(-x). */
534 if (negate_mathfn_p (builtin_mathfn_code (t)))
535 return negate_expr_p (CALL_EXPR_ARG (t, 0));
536 break;
537
538 case RSHIFT_EXPR:
539 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
540 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
541 {
542 tree op1 = TREE_OPERAND (t, 1);
543 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
544 return true;
545 }
546 break;
547
548 default:
549 break;
550 }
551 return false;
552 }
553
554 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
555 simplification is possible.
556 If negate_expr_p would return true for T, NULL_TREE will never be
557 returned. */
558
559 static tree
560 fold_negate_expr (location_t loc, tree t)
561 {
562 tree type = TREE_TYPE (t);
563 tree tem;
564
565 switch (TREE_CODE (t))
566 {
567 /* Convert - (~A) to A + 1. */
568 case BIT_NOT_EXPR:
569 if (INTEGRAL_TYPE_P (type))
570 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
571 build_one_cst (type));
572 break;
573
574 case INTEGER_CST:
575 tem = fold_negate_const (t, type);
576 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
577 || (ANY_INTEGRAL_TYPE_P (type)
578 && !TYPE_OVERFLOW_TRAPS (type)
579 && TYPE_OVERFLOW_WRAPS (type))
580 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
581 return tem;
582 break;
583
584 case REAL_CST:
585 tem = fold_negate_const (t, type);
586 return tem;
587
588 case FIXED_CST:
589 tem = fold_negate_const (t, type);
590 return tem;
591
592 case COMPLEX_CST:
593 {
594 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
595 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
596 if (rpart && ipart)
597 return build_complex (type, rpart, ipart);
598 }
599 break;
600
601 case VECTOR_CST:
602 {
603 int count = TYPE_VECTOR_SUBPARTS (type), i;
604 tree *elts = XALLOCAVEC (tree, count);
605
606 for (i = 0; i < count; i++)
607 {
608 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
609 if (elts[i] == NULL_TREE)
610 return NULL_TREE;
611 }
612
613 return build_vector (type, elts);
614 }
615
616 case COMPLEX_EXPR:
617 if (negate_expr_p (t))
618 return fold_build2_loc (loc, COMPLEX_EXPR, type,
619 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
620 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
621 break;
622
623 case CONJ_EXPR:
624 if (negate_expr_p (t))
625 return fold_build1_loc (loc, CONJ_EXPR, type,
626 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
627 break;
628
629 case NEGATE_EXPR:
630 if (!TYPE_OVERFLOW_SANITIZED (type))
631 return TREE_OPERAND (t, 0);
632 break;
633
634 case PLUS_EXPR:
635 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
636 && !HONOR_SIGNED_ZEROS (element_mode (type)))
637 {
638 /* -(A + B) -> (-B) - A. */
639 if (negate_expr_p (TREE_OPERAND (t, 1))
640 && reorder_operands_p (TREE_OPERAND (t, 0),
641 TREE_OPERAND (t, 1)))
642 {
643 tem = negate_expr (TREE_OPERAND (t, 1));
644 return fold_build2_loc (loc, MINUS_EXPR, type,
645 tem, TREE_OPERAND (t, 0));
646 }
647
648 /* -(A + B) -> (-A) - B. */
649 if (negate_expr_p (TREE_OPERAND (t, 0)))
650 {
651 tem = negate_expr (TREE_OPERAND (t, 0));
652 return fold_build2_loc (loc, MINUS_EXPR, type,
653 tem, TREE_OPERAND (t, 1));
654 }
655 }
656 break;
657
658 case MINUS_EXPR:
659 /* - (A - B) -> B - A */
660 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
661 && !HONOR_SIGNED_ZEROS (element_mode (type))
662 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
663 return fold_build2_loc (loc, MINUS_EXPR, type,
664 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
665 break;
666
667 case MULT_EXPR:
668 if (TYPE_UNSIGNED (type))
669 break;
670
671 /* Fall through. */
672
673 case RDIV_EXPR:
674 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
675 {
676 tem = TREE_OPERAND (t, 1);
677 if (negate_expr_p (tem))
678 return fold_build2_loc (loc, TREE_CODE (t), type,
679 TREE_OPERAND (t, 0), negate_expr (tem));
680 tem = TREE_OPERAND (t, 0);
681 if (negate_expr_p (tem))
682 return fold_build2_loc (loc, TREE_CODE (t), type,
683 negate_expr (tem), TREE_OPERAND (t, 1));
684 }
685 break;
686
687 case TRUNC_DIV_EXPR:
688 case ROUND_DIV_EXPR:
689 case EXACT_DIV_EXPR:
690 /* In general we can't negate A / B, because if A is INT_MIN and
691 B is 1, we may turn this into INT_MIN / -1 which is undefined
692 and actually traps on some architectures. But if overflow is
693 undefined, we can negate, because - (INT_MIN / 1) is an
694 overflow. */
695 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
696 {
697 const char * const warnmsg = G_("assuming signed overflow does not "
698 "occur when negating a division");
699 tem = TREE_OPERAND (t, 1);
700 if (negate_expr_p (tem))
701 {
702 if (INTEGRAL_TYPE_P (type)
703 && (TREE_CODE (tem) != INTEGER_CST
704 || integer_onep (tem)))
705 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
706 return fold_build2_loc (loc, TREE_CODE (t), type,
707 TREE_OPERAND (t, 0), negate_expr (tem));
708 }
709 /* If overflow is undefined then we have to be careful because
710 we ask whether it's ok to associate the negate with the
711 division which is not ok for example for
712 -((a - b) / c) where (-(a - b)) / c may invoke undefined
713 overflow because of negating INT_MIN. So do not use
714 negate_expr_p here but open-code the two important cases. */
715 tem = TREE_OPERAND (t, 0);
716 if ((INTEGRAL_TYPE_P (type)
717 && (TREE_CODE (tem) == NEGATE_EXPR
718 || (TREE_CODE (tem) == INTEGER_CST
719 && may_negate_without_overflow_p (tem))))
720 || !INTEGRAL_TYPE_P (type))
721 return fold_build2_loc (loc, TREE_CODE (t), type,
722 negate_expr (tem), TREE_OPERAND (t, 1));
723 }
724 break;
725
726 case NOP_EXPR:
727 /* Convert -((double)float) into (double)(-float). */
728 if (TREE_CODE (type) == REAL_TYPE)
729 {
730 tem = strip_float_extensions (t);
731 if (tem != t && negate_expr_p (tem))
732 return fold_convert_loc (loc, type, negate_expr (tem));
733 }
734 break;
735
736 case CALL_EXPR:
737 /* Negate -f(x) as f(-x). */
738 if (negate_mathfn_p (builtin_mathfn_code (t))
739 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
740 {
741 tree fndecl, arg;
742
743 fndecl = get_callee_fndecl (t);
744 arg = negate_expr (CALL_EXPR_ARG (t, 0));
745 return build_call_expr_loc (loc, fndecl, 1, arg);
746 }
747 break;
748
749 case RSHIFT_EXPR:
750 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
751 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
752 {
753 tree op1 = TREE_OPERAND (t, 1);
754 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
755 {
756 tree ntype = TYPE_UNSIGNED (type)
757 ? signed_type_for (type)
758 : unsigned_type_for (type);
759 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
760 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
761 return fold_convert_loc (loc, type, temp);
762 }
763 }
764 break;
765
766 default:
767 break;
768 }
769
770 return NULL_TREE;
771 }
772
773 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
774 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
775 return NULL_TREE. */
776
777 static tree
778 negate_expr (tree t)
779 {
780 tree type, tem;
781 location_t loc;
782
783 if (t == NULL_TREE)
784 return NULL_TREE;
785
786 loc = EXPR_LOCATION (t);
787 type = TREE_TYPE (t);
788 STRIP_SIGN_NOPS (t);
789
790 tem = fold_negate_expr (loc, t);
791 if (!tem)
792 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
793 return fold_convert_loc (loc, type, tem);
794 }
795 \f
796 /* Split a tree IN into a constant, literal and variable parts that could be
797 combined with CODE to make IN. "constant" means an expression with
798 TREE_CONSTANT but that isn't an actual constant. CODE must be a
799 commutative arithmetic operation. Store the constant part into *CONP,
800 the literal in *LITP and return the variable part. If a part isn't
801 present, set it to null. If the tree does not decompose in this way,
802 return the entire tree as the variable part and the other parts as null.
803
804 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
805 case, we negate an operand that was subtracted. Except if it is a
806 literal for which we use *MINUS_LITP instead.
807
808 If NEGATE_P is true, we are negating all of IN, again except a literal
809 for which we use *MINUS_LITP instead.
810
811 If IN is itself a literal or constant, return it as appropriate.
812
813 Note that we do not guarantee that any of the three values will be the
814 same type as IN, but they will have the same signedness and mode. */
815
816 static tree
817 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
818 tree *minus_litp, int negate_p)
819 {
820 tree var = 0;
821
822 *conp = 0;
823 *litp = 0;
824 *minus_litp = 0;
825
826 /* Strip any conversions that don't change the machine mode or signedness. */
827 STRIP_SIGN_NOPS (in);
828
829 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
830 || TREE_CODE (in) == FIXED_CST)
831 *litp = in;
832 else if (TREE_CODE (in) == code
833 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
834 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
835 /* We can associate addition and subtraction together (even
836 though the C standard doesn't say so) for integers because
837 the value is not affected. For reals, the value might be
838 affected, so we can't. */
839 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
840 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
841 {
842 tree op0 = TREE_OPERAND (in, 0);
843 tree op1 = TREE_OPERAND (in, 1);
844 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
845 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
846
847 /* First see if either of the operands is a literal, then a constant. */
848 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
849 || TREE_CODE (op0) == FIXED_CST)
850 *litp = op0, op0 = 0;
851 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
852 || TREE_CODE (op1) == FIXED_CST)
853 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
854
855 if (op0 != 0 && TREE_CONSTANT (op0))
856 *conp = op0, op0 = 0;
857 else if (op1 != 0 && TREE_CONSTANT (op1))
858 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
859
860 /* If we haven't dealt with either operand, this is not a case we can
861 decompose. Otherwise, VAR is either of the ones remaining, if any. */
862 if (op0 != 0 && op1 != 0)
863 var = in;
864 else if (op0 != 0)
865 var = op0;
866 else
867 var = op1, neg_var_p = neg1_p;
868
869 /* Now do any needed negations. */
870 if (neg_litp_p)
871 *minus_litp = *litp, *litp = 0;
872 if (neg_conp_p)
873 *conp = negate_expr (*conp);
874 if (neg_var_p)
875 var = negate_expr (var);
876 }
877 else if (TREE_CODE (in) == BIT_NOT_EXPR
878 && code == PLUS_EXPR)
879 {
880 /* -X - 1 is folded to ~X, undo that here. */
881 *minus_litp = build_one_cst (TREE_TYPE (in));
882 var = negate_expr (TREE_OPERAND (in, 0));
883 }
884 else if (TREE_CONSTANT (in))
885 *conp = in;
886 else
887 var = in;
888
889 if (negate_p)
890 {
891 if (*litp)
892 *minus_litp = *litp, *litp = 0;
893 else if (*minus_litp)
894 *litp = *minus_litp, *minus_litp = 0;
895 *conp = negate_expr (*conp);
896 var = negate_expr (var);
897 }
898
899 return var;
900 }
901
902 /* Re-associate trees split by the above function. T1 and T2 are
903 either expressions to associate or null. Return the new
904 expression, if any. LOC is the location of the new expression. If
905 we build an operation, do it in TYPE and with CODE. */
906
907 static tree
908 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
909 {
910 if (t1 == 0)
911 return t2;
912 else if (t2 == 0)
913 return t1;
914
915 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
916 try to fold this since we will have infinite recursion. But do
917 deal with any NEGATE_EXPRs. */
918 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
919 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
920 {
921 if (code == PLUS_EXPR)
922 {
923 if (TREE_CODE (t1) == NEGATE_EXPR)
924 return build2_loc (loc, MINUS_EXPR, type,
925 fold_convert_loc (loc, type, t2),
926 fold_convert_loc (loc, type,
927 TREE_OPERAND (t1, 0)));
928 else if (TREE_CODE (t2) == NEGATE_EXPR)
929 return build2_loc (loc, MINUS_EXPR, type,
930 fold_convert_loc (loc, type, t1),
931 fold_convert_loc (loc, type,
932 TREE_OPERAND (t2, 0)));
933 else if (integer_zerop (t2))
934 return fold_convert_loc (loc, type, t1);
935 }
936 else if (code == MINUS_EXPR)
937 {
938 if (integer_zerop (t2))
939 return fold_convert_loc (loc, type, t1);
940 }
941
942 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
943 fold_convert_loc (loc, type, t2));
944 }
945
946 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
947 fold_convert_loc (loc, type, t2));
948 }
949 \f
950 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
951 for use in int_const_binop, size_binop and size_diffop. */
952
953 static bool
954 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
955 {
956 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
957 return false;
958 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
959 return false;
960
961 switch (code)
962 {
963 case LSHIFT_EXPR:
964 case RSHIFT_EXPR:
965 case LROTATE_EXPR:
966 case RROTATE_EXPR:
967 return true;
968
969 default:
970 break;
971 }
972
973 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
974 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
975 && TYPE_MODE (type1) == TYPE_MODE (type2);
976 }
977
978
979 /* Combine two integer constants ARG1 and ARG2 under operation CODE
980 to produce a new constant. Return NULL_TREE if we don't know how
981 to evaluate CODE at compile-time. */
982
983 static tree
984 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
985 int overflowable)
986 {
987 wide_int res;
988 tree t;
989 tree type = TREE_TYPE (arg1);
990 signop sign = TYPE_SIGN (type);
991 bool overflow = false;
992
993 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
994 TYPE_SIGN (TREE_TYPE (parg2)));
995
996 switch (code)
997 {
998 case BIT_IOR_EXPR:
999 res = wi::bit_or (arg1, arg2);
1000 break;
1001
1002 case BIT_XOR_EXPR:
1003 res = wi::bit_xor (arg1, arg2);
1004 break;
1005
1006 case BIT_AND_EXPR:
1007 res = wi::bit_and (arg1, arg2);
1008 break;
1009
1010 case RSHIFT_EXPR:
1011 case LSHIFT_EXPR:
1012 if (wi::neg_p (arg2))
1013 {
1014 arg2 = -arg2;
1015 if (code == RSHIFT_EXPR)
1016 code = LSHIFT_EXPR;
1017 else
1018 code = RSHIFT_EXPR;
1019 }
1020
1021 if (code == RSHIFT_EXPR)
1022 /* It's unclear from the C standard whether shifts can overflow.
1023 The following code ignores overflow; perhaps a C standard
1024 interpretation ruling is needed. */
1025 res = wi::rshift (arg1, arg2, sign);
1026 else
1027 res = wi::lshift (arg1, arg2);
1028 break;
1029
1030 case RROTATE_EXPR:
1031 case LROTATE_EXPR:
1032 if (wi::neg_p (arg2))
1033 {
1034 arg2 = -arg2;
1035 if (code == RROTATE_EXPR)
1036 code = LROTATE_EXPR;
1037 else
1038 code = RROTATE_EXPR;
1039 }
1040
1041 if (code == RROTATE_EXPR)
1042 res = wi::rrotate (arg1, arg2);
1043 else
1044 res = wi::lrotate (arg1, arg2);
1045 break;
1046
1047 case PLUS_EXPR:
1048 res = wi::add (arg1, arg2, sign, &overflow);
1049 break;
1050
1051 case MINUS_EXPR:
1052 res = wi::sub (arg1, arg2, sign, &overflow);
1053 break;
1054
1055 case MULT_EXPR:
1056 res = wi::mul (arg1, arg2, sign, &overflow);
1057 break;
1058
1059 case MULT_HIGHPART_EXPR:
1060 res = wi::mul_high (arg1, arg2, sign);
1061 break;
1062
1063 case TRUNC_DIV_EXPR:
1064 case EXACT_DIV_EXPR:
1065 if (arg2 == 0)
1066 return NULL_TREE;
1067 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1068 break;
1069
1070 case FLOOR_DIV_EXPR:
1071 if (arg2 == 0)
1072 return NULL_TREE;
1073 res = wi::div_floor (arg1, arg2, sign, &overflow);
1074 break;
1075
1076 case CEIL_DIV_EXPR:
1077 if (arg2 == 0)
1078 return NULL_TREE;
1079 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1080 break;
1081
1082 case ROUND_DIV_EXPR:
1083 if (arg2 == 0)
1084 return NULL_TREE;
1085 res = wi::div_round (arg1, arg2, sign, &overflow);
1086 break;
1087
1088 case TRUNC_MOD_EXPR:
1089 if (arg2 == 0)
1090 return NULL_TREE;
1091 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1092 break;
1093
1094 case FLOOR_MOD_EXPR:
1095 if (arg2 == 0)
1096 return NULL_TREE;
1097 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1098 break;
1099
1100 case CEIL_MOD_EXPR:
1101 if (arg2 == 0)
1102 return NULL_TREE;
1103 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1104 break;
1105
1106 case ROUND_MOD_EXPR:
1107 if (arg2 == 0)
1108 return NULL_TREE;
1109 res = wi::mod_round (arg1, arg2, sign, &overflow);
1110 break;
1111
1112 case MIN_EXPR:
1113 res = wi::min (arg1, arg2, sign);
1114 break;
1115
1116 case MAX_EXPR:
1117 res = wi::max (arg1, arg2, sign);
1118 break;
1119
1120 default:
1121 return NULL_TREE;
1122 }
1123
1124 t = force_fit_type (type, res, overflowable,
1125 (((sign == SIGNED || overflowable == -1)
1126 && overflow)
1127 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1128
1129 return t;
1130 }
1131
1132 tree
1133 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1134 {
1135 return int_const_binop_1 (code, arg1, arg2, 1);
1136 }
1137
1138 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1139 constant. We assume ARG1 and ARG2 have the same data type, or at least
1140 are the same kind of constant and the same machine mode. Return zero if
1141 combining the constants is not allowed in the current operating mode. */
1142
1143 static tree
1144 const_binop (enum tree_code code, tree arg1, tree arg2)
1145 {
1146 /* Sanity check for the recursive cases. */
1147 if (!arg1 || !arg2)
1148 return NULL_TREE;
1149
1150 STRIP_NOPS (arg1);
1151 STRIP_NOPS (arg2);
1152
1153 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1154 {
1155 if (code == POINTER_PLUS_EXPR)
1156 return int_const_binop (PLUS_EXPR,
1157 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1158
1159 return int_const_binop (code, arg1, arg2);
1160 }
1161
1162 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1163 {
1164 machine_mode mode;
1165 REAL_VALUE_TYPE d1;
1166 REAL_VALUE_TYPE d2;
1167 REAL_VALUE_TYPE value;
1168 REAL_VALUE_TYPE result;
1169 bool inexact;
1170 tree t, type;
1171
1172 /* The following codes are handled by real_arithmetic. */
1173 switch (code)
1174 {
1175 case PLUS_EXPR:
1176 case MINUS_EXPR:
1177 case MULT_EXPR:
1178 case RDIV_EXPR:
1179 case MIN_EXPR:
1180 case MAX_EXPR:
1181 break;
1182
1183 default:
1184 return NULL_TREE;
1185 }
1186
1187 d1 = TREE_REAL_CST (arg1);
1188 d2 = TREE_REAL_CST (arg2);
1189
1190 type = TREE_TYPE (arg1);
1191 mode = TYPE_MODE (type);
1192
1193 /* Don't perform operation if we honor signaling NaNs and
1194 either operand is a NaN. */
1195 if (HONOR_SNANS (mode)
1196 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1197 return NULL_TREE;
1198
1199 /* Don't perform operation if it would raise a division
1200 by zero exception. */
1201 if (code == RDIV_EXPR
1202 && REAL_VALUES_EQUAL (d2, dconst0)
1203 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1204 return NULL_TREE;
1205
1206 /* If either operand is a NaN, just return it. Otherwise, set up
1207 for floating-point trap; we return an overflow. */
1208 if (REAL_VALUE_ISNAN (d1))
1209 return arg1;
1210 else if (REAL_VALUE_ISNAN (d2))
1211 return arg2;
1212
1213 inexact = real_arithmetic (&value, code, &d1, &d2);
1214 real_convert (&result, mode, &value);
1215
1216 /* Don't constant fold this floating point operation if
1217 the result has overflowed and flag_trapping_math. */
1218 if (flag_trapping_math
1219 && MODE_HAS_INFINITIES (mode)
1220 && REAL_VALUE_ISINF (result)
1221 && !REAL_VALUE_ISINF (d1)
1222 && !REAL_VALUE_ISINF (d2))
1223 return NULL_TREE;
1224
1225 /* Don't constant fold this floating point operation if the
1226 result may dependent upon the run-time rounding mode and
1227 flag_rounding_math is set, or if GCC's software emulation
1228 is unable to accurately represent the result. */
1229 if ((flag_rounding_math
1230 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1231 && (inexact || !real_identical (&result, &value)))
1232 return NULL_TREE;
1233
1234 t = build_real (type, result);
1235
1236 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1237 return t;
1238 }
1239
1240 if (TREE_CODE (arg1) == FIXED_CST)
1241 {
1242 FIXED_VALUE_TYPE f1;
1243 FIXED_VALUE_TYPE f2;
1244 FIXED_VALUE_TYPE result;
1245 tree t, type;
1246 int sat_p;
1247 bool overflow_p;
1248
1249 /* The following codes are handled by fixed_arithmetic. */
1250 switch (code)
1251 {
1252 case PLUS_EXPR:
1253 case MINUS_EXPR:
1254 case MULT_EXPR:
1255 case TRUNC_DIV_EXPR:
1256 if (TREE_CODE (arg2) != FIXED_CST)
1257 return NULL_TREE;
1258 f2 = TREE_FIXED_CST (arg2);
1259 break;
1260
1261 case LSHIFT_EXPR:
1262 case RSHIFT_EXPR:
1263 {
1264 if (TREE_CODE (arg2) != INTEGER_CST)
1265 return NULL_TREE;
1266 wide_int w2 = arg2;
1267 f2.data.high = w2.elt (1);
1268 f2.data.low = w2.elt (0);
1269 f2.mode = SImode;
1270 }
1271 break;
1272
1273 default:
1274 return NULL_TREE;
1275 }
1276
1277 f1 = TREE_FIXED_CST (arg1);
1278 type = TREE_TYPE (arg1);
1279 sat_p = TYPE_SATURATING (type);
1280 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1281 t = build_fixed (type, result);
1282 /* Propagate overflow flags. */
1283 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1284 TREE_OVERFLOW (t) = 1;
1285 return t;
1286 }
1287
1288 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1289 {
1290 tree type = TREE_TYPE (arg1);
1291 tree r1 = TREE_REALPART (arg1);
1292 tree i1 = TREE_IMAGPART (arg1);
1293 tree r2 = TREE_REALPART (arg2);
1294 tree i2 = TREE_IMAGPART (arg2);
1295 tree real, imag;
1296
1297 switch (code)
1298 {
1299 case PLUS_EXPR:
1300 case MINUS_EXPR:
1301 real = const_binop (code, r1, r2);
1302 imag = const_binop (code, i1, i2);
1303 break;
1304
1305 case MULT_EXPR:
1306 if (COMPLEX_FLOAT_TYPE_P (type))
1307 return do_mpc_arg2 (arg1, arg2, type,
1308 /* do_nonfinite= */ folding_initializer,
1309 mpc_mul);
1310
1311 real = const_binop (MINUS_EXPR,
1312 const_binop (MULT_EXPR, r1, r2),
1313 const_binop (MULT_EXPR, i1, i2));
1314 imag = const_binop (PLUS_EXPR,
1315 const_binop (MULT_EXPR, r1, i2),
1316 const_binop (MULT_EXPR, i1, r2));
1317 break;
1318
1319 case RDIV_EXPR:
1320 if (COMPLEX_FLOAT_TYPE_P (type))
1321 return do_mpc_arg2 (arg1, arg2, type,
1322 /* do_nonfinite= */ folding_initializer,
1323 mpc_div);
1324 /* Fallthru ... */
1325 case TRUNC_DIV_EXPR:
1326 case CEIL_DIV_EXPR:
1327 case FLOOR_DIV_EXPR:
1328 case ROUND_DIV_EXPR:
1329 if (flag_complex_method == 0)
1330 {
1331 /* Keep this algorithm in sync with
1332 tree-complex.c:expand_complex_div_straight().
1333
1334 Expand complex division to scalars, straightforward algorithm.
1335 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1336 t = br*br + bi*bi
1337 */
1338 tree magsquared
1339 = const_binop (PLUS_EXPR,
1340 const_binop (MULT_EXPR, r2, r2),
1341 const_binop (MULT_EXPR, i2, i2));
1342 tree t1
1343 = const_binop (PLUS_EXPR,
1344 const_binop (MULT_EXPR, r1, r2),
1345 const_binop (MULT_EXPR, i1, i2));
1346 tree t2
1347 = const_binop (MINUS_EXPR,
1348 const_binop (MULT_EXPR, i1, r2),
1349 const_binop (MULT_EXPR, r1, i2));
1350
1351 real = const_binop (code, t1, magsquared);
1352 imag = const_binop (code, t2, magsquared);
1353 }
1354 else
1355 {
1356 /* Keep this algorithm in sync with
1357 tree-complex.c:expand_complex_div_wide().
1358
1359 Expand complex division to scalars, modified algorithm to minimize
1360 overflow with wide input ranges. */
1361 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1362 fold_abs_const (r2, TREE_TYPE (type)),
1363 fold_abs_const (i2, TREE_TYPE (type)));
1364
1365 if (integer_nonzerop (compare))
1366 {
1367 /* In the TRUE branch, we compute
1368 ratio = br/bi;
1369 div = (br * ratio) + bi;
1370 tr = (ar * ratio) + ai;
1371 ti = (ai * ratio) - ar;
1372 tr = tr / div;
1373 ti = ti / div; */
1374 tree ratio = const_binop (code, r2, i2);
1375 tree div = const_binop (PLUS_EXPR, i2,
1376 const_binop (MULT_EXPR, r2, ratio));
1377 real = const_binop (MULT_EXPR, r1, ratio);
1378 real = const_binop (PLUS_EXPR, real, i1);
1379 real = const_binop (code, real, div);
1380
1381 imag = const_binop (MULT_EXPR, i1, ratio);
1382 imag = const_binop (MINUS_EXPR, imag, r1);
1383 imag = const_binop (code, imag, div);
1384 }
1385 else
1386 {
1387 /* In the FALSE branch, we compute
1388 ratio = d/c;
1389 divisor = (d * ratio) + c;
1390 tr = (b * ratio) + a;
1391 ti = b - (a * ratio);
1392 tr = tr / div;
1393 ti = ti / div; */
1394 tree ratio = const_binop (code, i2, r2);
1395 tree div = const_binop (PLUS_EXPR, r2,
1396 const_binop (MULT_EXPR, i2, ratio));
1397
1398 real = const_binop (MULT_EXPR, i1, ratio);
1399 real = const_binop (PLUS_EXPR, real, r1);
1400 real = const_binop (code, real, div);
1401
1402 imag = const_binop (MULT_EXPR, r1, ratio);
1403 imag = const_binop (MINUS_EXPR, i1, imag);
1404 imag = const_binop (code, imag, div);
1405 }
1406 }
1407 break;
1408
1409 default:
1410 return NULL_TREE;
1411 }
1412
1413 if (real && imag)
1414 return build_complex (type, real, imag);
1415 }
1416
1417 if (TREE_CODE (arg1) == VECTOR_CST
1418 && TREE_CODE (arg2) == VECTOR_CST)
1419 {
1420 tree type = TREE_TYPE (arg1);
1421 int count = TYPE_VECTOR_SUBPARTS (type), i;
1422 tree *elts = XALLOCAVEC (tree, count);
1423
1424 for (i = 0; i < count; i++)
1425 {
1426 tree elem1 = VECTOR_CST_ELT (arg1, i);
1427 tree elem2 = VECTOR_CST_ELT (arg2, i);
1428
1429 elts[i] = const_binop (code, elem1, elem2);
1430
1431 /* It is possible that const_binop cannot handle the given
1432 code and return NULL_TREE */
1433 if (elts[i] == NULL_TREE)
1434 return NULL_TREE;
1435 }
1436
1437 return build_vector (type, elts);
1438 }
1439
1440 /* Shifts allow a scalar offset for a vector. */
1441 if (TREE_CODE (arg1) == VECTOR_CST
1442 && TREE_CODE (arg2) == INTEGER_CST)
1443 {
1444 tree type = TREE_TYPE (arg1);
1445 int count = TYPE_VECTOR_SUBPARTS (type), i;
1446 tree *elts = XALLOCAVEC (tree, count);
1447
1448 for (i = 0; i < count; i++)
1449 {
1450 tree elem1 = VECTOR_CST_ELT (arg1, i);
1451
1452 elts[i] = const_binop (code, elem1, arg2);
1453
1454 /* It is possible that const_binop cannot handle the given
1455 code and return NULL_TREE. */
1456 if (elts[i] == NULL_TREE)
1457 return NULL_TREE;
1458 }
1459
1460 return build_vector (type, elts);
1461 }
1462 return NULL_TREE;
1463 }
1464
1465 /* Overload that adds a TYPE parameter to be able to dispatch
1466 to fold_relational_const. */
1467
1468 tree
1469 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1470 {
1471 if (TREE_CODE_CLASS (code) == tcc_comparison)
1472 return fold_relational_const (code, type, arg1, arg2);
1473
1474 /* ??? Until we make the const_binop worker take the type of the
1475 result as argument put those cases that need it here. */
1476 switch (code)
1477 {
1478 case COMPLEX_EXPR:
1479 if ((TREE_CODE (arg1) == REAL_CST
1480 && TREE_CODE (arg2) == REAL_CST)
1481 || (TREE_CODE (arg1) == INTEGER_CST
1482 && TREE_CODE (arg2) == INTEGER_CST))
1483 return build_complex (type, arg1, arg2);
1484 return NULL_TREE;
1485
1486 case VEC_PACK_TRUNC_EXPR:
1487 case VEC_PACK_FIX_TRUNC_EXPR:
1488 {
1489 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1490 tree *elts;
1491
1492 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1493 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1494 if (TREE_CODE (arg1) != VECTOR_CST
1495 || TREE_CODE (arg2) != VECTOR_CST)
1496 return NULL_TREE;
1497
1498 elts = XALLOCAVEC (tree, nelts);
1499 if (!vec_cst_ctor_to_array (arg1, elts)
1500 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1501 return NULL_TREE;
1502
1503 for (i = 0; i < nelts; i++)
1504 {
1505 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1506 ? NOP_EXPR : FIX_TRUNC_EXPR,
1507 TREE_TYPE (type), elts[i]);
1508 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1509 return NULL_TREE;
1510 }
1511
1512 return build_vector (type, elts);
1513 }
1514
1515 case VEC_WIDEN_MULT_LO_EXPR:
1516 case VEC_WIDEN_MULT_HI_EXPR:
1517 case VEC_WIDEN_MULT_EVEN_EXPR:
1518 case VEC_WIDEN_MULT_ODD_EXPR:
1519 {
1520 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1521 unsigned int out, ofs, scale;
1522 tree *elts;
1523
1524 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1525 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1526 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1527 return NULL_TREE;
1528
1529 elts = XALLOCAVEC (tree, nelts * 4);
1530 if (!vec_cst_ctor_to_array (arg1, elts)
1531 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1532 return NULL_TREE;
1533
1534 if (code == VEC_WIDEN_MULT_LO_EXPR)
1535 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1536 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1537 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1538 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1539 scale = 1, ofs = 0;
1540 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1541 scale = 1, ofs = 1;
1542
1543 for (out = 0; out < nelts; out++)
1544 {
1545 unsigned int in1 = (out << scale) + ofs;
1546 unsigned int in2 = in1 + nelts * 2;
1547 tree t1, t2;
1548
1549 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1550 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1551
1552 if (t1 == NULL_TREE || t2 == NULL_TREE)
1553 return NULL_TREE;
1554 elts[out] = const_binop (MULT_EXPR, t1, t2);
1555 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1556 return NULL_TREE;
1557 }
1558
1559 return build_vector (type, elts);
1560 }
1561
1562 default:;
1563 }
1564
1565 /* Make sure type and arg0 have the same saturating flag. */
1566 gcc_checking_assert (TYPE_SATURATING (type)
1567 == TYPE_SATURATING (TREE_TYPE (arg1)));
1568 return const_binop (code, arg1, arg2);
1569 }
1570
1571 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1572 Return zero if computing the constants is not possible. */
1573
1574 tree
1575 const_unop (enum tree_code code, tree type, tree arg0)
1576 {
1577 switch (code)
1578 {
1579 CASE_CONVERT:
1580 case FLOAT_EXPR:
1581 case FIX_TRUNC_EXPR:
1582 case FIXED_CONVERT_EXPR:
1583 return fold_convert_const (code, type, arg0);
1584
1585 case ADDR_SPACE_CONVERT_EXPR:
1586 if (integer_zerop (arg0))
1587 return fold_convert_const (code, type, arg0);
1588 break;
1589
1590 case VIEW_CONVERT_EXPR:
1591 return fold_view_convert_expr (type, arg0);
1592
1593 case NEGATE_EXPR:
1594 {
1595 /* Can't call fold_negate_const directly here as that doesn't
1596 handle all cases and we might not be able to negate some
1597 constants. */
1598 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1599 if (tem && CONSTANT_CLASS_P (tem))
1600 return tem;
1601 break;
1602 }
1603
1604 case ABS_EXPR:
1605 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1606 return fold_abs_const (arg0, type);
1607 break;
1608
1609 case CONJ_EXPR:
1610 if (TREE_CODE (arg0) == COMPLEX_CST)
1611 {
1612 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1613 TREE_TYPE (type));
1614 return build_complex (type, TREE_REALPART (arg0), ipart);
1615 }
1616 break;
1617
1618 case BIT_NOT_EXPR:
1619 if (TREE_CODE (arg0) == INTEGER_CST)
1620 return fold_not_const (arg0, type);
1621 /* Perform BIT_NOT_EXPR on each element individually. */
1622 else if (TREE_CODE (arg0) == VECTOR_CST)
1623 {
1624 tree *elements;
1625 tree elem;
1626 unsigned count = VECTOR_CST_NELTS (arg0), i;
1627
1628 elements = XALLOCAVEC (tree, count);
1629 for (i = 0; i < count; i++)
1630 {
1631 elem = VECTOR_CST_ELT (arg0, i);
1632 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1633 if (elem == NULL_TREE)
1634 break;
1635 elements[i] = elem;
1636 }
1637 if (i == count)
1638 return build_vector (type, elements);
1639 }
1640 break;
1641
1642 case TRUTH_NOT_EXPR:
1643 if (TREE_CODE (arg0) == INTEGER_CST)
1644 return constant_boolean_node (integer_zerop (arg0), type);
1645 break;
1646
1647 case REALPART_EXPR:
1648 if (TREE_CODE (arg0) == COMPLEX_CST)
1649 return fold_convert (type, TREE_REALPART (arg0));
1650 break;
1651
1652 case IMAGPART_EXPR:
1653 if (TREE_CODE (arg0) == COMPLEX_CST)
1654 return fold_convert (type, TREE_IMAGPART (arg0));
1655 break;
1656
1657 case VEC_UNPACK_LO_EXPR:
1658 case VEC_UNPACK_HI_EXPR:
1659 case VEC_UNPACK_FLOAT_LO_EXPR:
1660 case VEC_UNPACK_FLOAT_HI_EXPR:
1661 {
1662 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1663 tree *elts;
1664 enum tree_code subcode;
1665
1666 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1667 if (TREE_CODE (arg0) != VECTOR_CST)
1668 return NULL_TREE;
1669
1670 elts = XALLOCAVEC (tree, nelts * 2);
1671 if (!vec_cst_ctor_to_array (arg0, elts))
1672 return NULL_TREE;
1673
1674 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1675 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1676 elts += nelts;
1677
1678 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1679 subcode = NOP_EXPR;
1680 else
1681 subcode = FLOAT_EXPR;
1682
1683 for (i = 0; i < nelts; i++)
1684 {
1685 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1686 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1687 return NULL_TREE;
1688 }
1689
1690 return build_vector (type, elts);
1691 }
1692
1693 case REDUC_MIN_EXPR:
1694 case REDUC_MAX_EXPR:
1695 case REDUC_PLUS_EXPR:
1696 {
1697 unsigned int nelts, i;
1698 tree *elts;
1699 enum tree_code subcode;
1700
1701 if (TREE_CODE (arg0) != VECTOR_CST)
1702 return NULL_TREE;
1703 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1704
1705 elts = XALLOCAVEC (tree, nelts);
1706 if (!vec_cst_ctor_to_array (arg0, elts))
1707 return NULL_TREE;
1708
1709 switch (code)
1710 {
1711 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1712 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1713 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1714 default: gcc_unreachable ();
1715 }
1716
1717 for (i = 1; i < nelts; i++)
1718 {
1719 elts[0] = const_binop (subcode, elts[0], elts[i]);
1720 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1721 return NULL_TREE;
1722 }
1723
1724 return elts[0];
1725 }
1726
1727 default:
1728 break;
1729 }
1730
1731 return NULL_TREE;
1732 }
1733
1734 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1735 indicates which particular sizetype to create. */
1736
1737 tree
1738 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1739 {
1740 return build_int_cst (sizetype_tab[(int) kind], number);
1741 }
1742 \f
1743 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1744 is a tree code. The type of the result is taken from the operands.
1745 Both must be equivalent integer types, ala int_binop_types_match_p.
1746 If the operands are constant, so is the result. */
1747
1748 tree
1749 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1750 {
1751 tree type = TREE_TYPE (arg0);
1752
1753 if (arg0 == error_mark_node || arg1 == error_mark_node)
1754 return error_mark_node;
1755
1756 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1757 TREE_TYPE (arg1)));
1758
1759 /* Handle the special case of two integer constants faster. */
1760 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1761 {
1762 /* And some specific cases even faster than that. */
1763 if (code == PLUS_EXPR)
1764 {
1765 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1766 return arg1;
1767 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1768 return arg0;
1769 }
1770 else if (code == MINUS_EXPR)
1771 {
1772 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1773 return arg0;
1774 }
1775 else if (code == MULT_EXPR)
1776 {
1777 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1778 return arg1;
1779 }
1780
1781 /* Handle general case of two integer constants. For sizetype
1782 constant calculations we always want to know about overflow,
1783 even in the unsigned case. */
1784 return int_const_binop_1 (code, arg0, arg1, -1);
1785 }
1786
1787 return fold_build2_loc (loc, code, type, arg0, arg1);
1788 }
1789
1790 /* Given two values, either both of sizetype or both of bitsizetype,
1791 compute the difference between the two values. Return the value
1792 in signed type corresponding to the type of the operands. */
1793
1794 tree
1795 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1796 {
1797 tree type = TREE_TYPE (arg0);
1798 tree ctype;
1799
1800 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1801 TREE_TYPE (arg1)));
1802
1803 /* If the type is already signed, just do the simple thing. */
1804 if (!TYPE_UNSIGNED (type))
1805 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1806
1807 if (type == sizetype)
1808 ctype = ssizetype;
1809 else if (type == bitsizetype)
1810 ctype = sbitsizetype;
1811 else
1812 ctype = signed_type_for (type);
1813
1814 /* If either operand is not a constant, do the conversions to the signed
1815 type and subtract. The hardware will do the right thing with any
1816 overflow in the subtraction. */
1817 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1818 return size_binop_loc (loc, MINUS_EXPR,
1819 fold_convert_loc (loc, ctype, arg0),
1820 fold_convert_loc (loc, ctype, arg1));
1821
1822 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1823 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1824 overflow) and negate (which can't either). Special-case a result
1825 of zero while we're here. */
1826 if (tree_int_cst_equal (arg0, arg1))
1827 return build_int_cst (ctype, 0);
1828 else if (tree_int_cst_lt (arg1, arg0))
1829 return fold_convert_loc (loc, ctype,
1830 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1831 else
1832 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1833 fold_convert_loc (loc, ctype,
1834 size_binop_loc (loc,
1835 MINUS_EXPR,
1836 arg1, arg0)));
1837 }
1838 \f
1839 /* A subroutine of fold_convert_const handling conversions of an
1840 INTEGER_CST to another integer type. */
1841
1842 static tree
1843 fold_convert_const_int_from_int (tree type, const_tree arg1)
1844 {
1845 /* Given an integer constant, make new constant with new type,
1846 appropriately sign-extended or truncated. Use widest_int
1847 so that any extension is done according ARG1's type. */
1848 return force_fit_type (type, wi::to_widest (arg1),
1849 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1850 TREE_OVERFLOW (arg1));
1851 }
1852
1853 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1854 to an integer type. */
1855
1856 static tree
1857 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1858 {
1859 bool overflow = false;
1860 tree t;
1861
1862 /* The following code implements the floating point to integer
1863 conversion rules required by the Java Language Specification,
1864 that IEEE NaNs are mapped to zero and values that overflow
1865 the target precision saturate, i.e. values greater than
1866 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1867 are mapped to INT_MIN. These semantics are allowed by the
1868 C and C++ standards that simply state that the behavior of
1869 FP-to-integer conversion is unspecified upon overflow. */
1870
1871 wide_int val;
1872 REAL_VALUE_TYPE r;
1873 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1874
1875 switch (code)
1876 {
1877 case FIX_TRUNC_EXPR:
1878 real_trunc (&r, VOIDmode, &x);
1879 break;
1880
1881 default:
1882 gcc_unreachable ();
1883 }
1884
1885 /* If R is NaN, return zero and show we have an overflow. */
1886 if (REAL_VALUE_ISNAN (r))
1887 {
1888 overflow = true;
1889 val = wi::zero (TYPE_PRECISION (type));
1890 }
1891
1892 /* See if R is less than the lower bound or greater than the
1893 upper bound. */
1894
1895 if (! overflow)
1896 {
1897 tree lt = TYPE_MIN_VALUE (type);
1898 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1899 if (REAL_VALUES_LESS (r, l))
1900 {
1901 overflow = true;
1902 val = lt;
1903 }
1904 }
1905
1906 if (! overflow)
1907 {
1908 tree ut = TYPE_MAX_VALUE (type);
1909 if (ut)
1910 {
1911 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1912 if (REAL_VALUES_LESS (u, r))
1913 {
1914 overflow = true;
1915 val = ut;
1916 }
1917 }
1918 }
1919
1920 if (! overflow)
1921 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1922
1923 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1924 return t;
1925 }
1926
1927 /* A subroutine of fold_convert_const handling conversions of a
1928 FIXED_CST to an integer type. */
1929
1930 static tree
1931 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1932 {
1933 tree t;
1934 double_int temp, temp_trunc;
1935 unsigned int mode;
1936
1937 /* Right shift FIXED_CST to temp by fbit. */
1938 temp = TREE_FIXED_CST (arg1).data;
1939 mode = TREE_FIXED_CST (arg1).mode;
1940 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1941 {
1942 temp = temp.rshift (GET_MODE_FBIT (mode),
1943 HOST_BITS_PER_DOUBLE_INT,
1944 SIGNED_FIXED_POINT_MODE_P (mode));
1945
1946 /* Left shift temp to temp_trunc by fbit. */
1947 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1948 HOST_BITS_PER_DOUBLE_INT,
1949 SIGNED_FIXED_POINT_MODE_P (mode));
1950 }
1951 else
1952 {
1953 temp = double_int_zero;
1954 temp_trunc = double_int_zero;
1955 }
1956
1957 /* If FIXED_CST is negative, we need to round the value toward 0.
1958 By checking if the fractional bits are not zero to add 1 to temp. */
1959 if (SIGNED_FIXED_POINT_MODE_P (mode)
1960 && temp_trunc.is_negative ()
1961 && TREE_FIXED_CST (arg1).data != temp_trunc)
1962 temp += double_int_one;
1963
1964 /* Given a fixed-point constant, make new constant with new type,
1965 appropriately sign-extended or truncated. */
1966 t = force_fit_type (type, temp, -1,
1967 (temp.is_negative ()
1968 && (TYPE_UNSIGNED (type)
1969 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1970 | TREE_OVERFLOW (arg1));
1971
1972 return t;
1973 }
1974
1975 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1976 to another floating point type. */
1977
1978 static tree
1979 fold_convert_const_real_from_real (tree type, const_tree arg1)
1980 {
1981 REAL_VALUE_TYPE value;
1982 tree t;
1983
1984 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1985 t = build_real (type, value);
1986
1987 /* If converting an infinity or NAN to a representation that doesn't
1988 have one, set the overflow bit so that we can produce some kind of
1989 error message at the appropriate point if necessary. It's not the
1990 most user-friendly message, but it's better than nothing. */
1991 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1992 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1993 TREE_OVERFLOW (t) = 1;
1994 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1995 && !MODE_HAS_NANS (TYPE_MODE (type)))
1996 TREE_OVERFLOW (t) = 1;
1997 /* Regular overflow, conversion produced an infinity in a mode that
1998 can't represent them. */
1999 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2000 && REAL_VALUE_ISINF (value)
2001 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2002 TREE_OVERFLOW (t) = 1;
2003 else
2004 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2005 return t;
2006 }
2007
2008 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2009 to a floating point type. */
2010
2011 static tree
2012 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2013 {
2014 REAL_VALUE_TYPE value;
2015 tree t;
2016
2017 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2018 t = build_real (type, value);
2019
2020 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2021 return t;
2022 }
2023
2024 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2025 to another fixed-point type. */
2026
2027 static tree
2028 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2029 {
2030 FIXED_VALUE_TYPE value;
2031 tree t;
2032 bool overflow_p;
2033
2034 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2035 TYPE_SATURATING (type));
2036 t = build_fixed (type, value);
2037
2038 /* Propagate overflow flags. */
2039 if (overflow_p | TREE_OVERFLOW (arg1))
2040 TREE_OVERFLOW (t) = 1;
2041 return t;
2042 }
2043
2044 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2045 to a fixed-point type. */
2046
2047 static tree
2048 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2049 {
2050 FIXED_VALUE_TYPE value;
2051 tree t;
2052 bool overflow_p;
2053 double_int di;
2054
2055 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2056
2057 di.low = TREE_INT_CST_ELT (arg1, 0);
2058 if (TREE_INT_CST_NUNITS (arg1) == 1)
2059 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2060 else
2061 di.high = TREE_INT_CST_ELT (arg1, 1);
2062
2063 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2064 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2065 TYPE_SATURATING (type));
2066 t = build_fixed (type, value);
2067
2068 /* Propagate overflow flags. */
2069 if (overflow_p | TREE_OVERFLOW (arg1))
2070 TREE_OVERFLOW (t) = 1;
2071 return t;
2072 }
2073
2074 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2075 to a fixed-point type. */
2076
2077 static tree
2078 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2079 {
2080 FIXED_VALUE_TYPE value;
2081 tree t;
2082 bool overflow_p;
2083
2084 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2085 &TREE_REAL_CST (arg1),
2086 TYPE_SATURATING (type));
2087 t = build_fixed (type, value);
2088
2089 /* Propagate overflow flags. */
2090 if (overflow_p | TREE_OVERFLOW (arg1))
2091 TREE_OVERFLOW (t) = 1;
2092 return t;
2093 }
2094
2095 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2096 type TYPE. If no simplification can be done return NULL_TREE. */
2097
2098 static tree
2099 fold_convert_const (enum tree_code code, tree type, tree arg1)
2100 {
2101 if (TREE_TYPE (arg1) == type)
2102 return arg1;
2103
2104 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2105 || TREE_CODE (type) == OFFSET_TYPE)
2106 {
2107 if (TREE_CODE (arg1) == INTEGER_CST)
2108 return fold_convert_const_int_from_int (type, arg1);
2109 else if (TREE_CODE (arg1) == REAL_CST)
2110 return fold_convert_const_int_from_real (code, type, arg1);
2111 else if (TREE_CODE (arg1) == FIXED_CST)
2112 return fold_convert_const_int_from_fixed (type, arg1);
2113 }
2114 else if (TREE_CODE (type) == REAL_TYPE)
2115 {
2116 if (TREE_CODE (arg1) == INTEGER_CST)
2117 return build_real_from_int_cst (type, arg1);
2118 else if (TREE_CODE (arg1) == REAL_CST)
2119 return fold_convert_const_real_from_real (type, arg1);
2120 else if (TREE_CODE (arg1) == FIXED_CST)
2121 return fold_convert_const_real_from_fixed (type, arg1);
2122 }
2123 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2124 {
2125 if (TREE_CODE (arg1) == FIXED_CST)
2126 return fold_convert_const_fixed_from_fixed (type, arg1);
2127 else if (TREE_CODE (arg1) == INTEGER_CST)
2128 return fold_convert_const_fixed_from_int (type, arg1);
2129 else if (TREE_CODE (arg1) == REAL_CST)
2130 return fold_convert_const_fixed_from_real (type, arg1);
2131 }
2132 return NULL_TREE;
2133 }
2134
2135 /* Construct a vector of zero elements of vector type TYPE. */
2136
2137 static tree
2138 build_zero_vector (tree type)
2139 {
2140 tree t;
2141
2142 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2143 return build_vector_from_val (type, t);
2144 }
2145
2146 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2147
2148 bool
2149 fold_convertible_p (const_tree type, const_tree arg)
2150 {
2151 tree orig = TREE_TYPE (arg);
2152
2153 if (type == orig)
2154 return true;
2155
2156 if (TREE_CODE (arg) == ERROR_MARK
2157 || TREE_CODE (type) == ERROR_MARK
2158 || TREE_CODE (orig) == ERROR_MARK)
2159 return false;
2160
2161 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2162 return true;
2163
2164 switch (TREE_CODE (type))
2165 {
2166 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2167 case POINTER_TYPE: case REFERENCE_TYPE:
2168 case OFFSET_TYPE:
2169 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2170 || TREE_CODE (orig) == OFFSET_TYPE)
2171 return true;
2172 return (TREE_CODE (orig) == VECTOR_TYPE
2173 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2174
2175 case REAL_TYPE:
2176 case FIXED_POINT_TYPE:
2177 case COMPLEX_TYPE:
2178 case VECTOR_TYPE:
2179 case VOID_TYPE:
2180 return TREE_CODE (type) == TREE_CODE (orig);
2181
2182 default:
2183 return false;
2184 }
2185 }
2186
2187 /* Convert expression ARG to type TYPE. Used by the middle-end for
2188 simple conversions in preference to calling the front-end's convert. */
2189
2190 tree
2191 fold_convert_loc (location_t loc, tree type, tree arg)
2192 {
2193 tree orig = TREE_TYPE (arg);
2194 tree tem;
2195
2196 if (type == orig)
2197 return arg;
2198
2199 if (TREE_CODE (arg) == ERROR_MARK
2200 || TREE_CODE (type) == ERROR_MARK
2201 || TREE_CODE (orig) == ERROR_MARK)
2202 return error_mark_node;
2203
2204 switch (TREE_CODE (type))
2205 {
2206 case POINTER_TYPE:
2207 case REFERENCE_TYPE:
2208 /* Handle conversions between pointers to different address spaces. */
2209 if (POINTER_TYPE_P (orig)
2210 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2211 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2212 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2213 /* fall through */
2214
2215 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2216 case OFFSET_TYPE:
2217 if (TREE_CODE (arg) == INTEGER_CST)
2218 {
2219 tem = fold_convert_const (NOP_EXPR, type, arg);
2220 if (tem != NULL_TREE)
2221 return tem;
2222 }
2223 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2224 || TREE_CODE (orig) == OFFSET_TYPE)
2225 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2226 if (TREE_CODE (orig) == COMPLEX_TYPE)
2227 return fold_convert_loc (loc, type,
2228 fold_build1_loc (loc, REALPART_EXPR,
2229 TREE_TYPE (orig), arg));
2230 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2231 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2232 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2233
2234 case REAL_TYPE:
2235 if (TREE_CODE (arg) == INTEGER_CST)
2236 {
2237 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2238 if (tem != NULL_TREE)
2239 return tem;
2240 }
2241 else if (TREE_CODE (arg) == REAL_CST)
2242 {
2243 tem = fold_convert_const (NOP_EXPR, type, arg);
2244 if (tem != NULL_TREE)
2245 return tem;
2246 }
2247 else if (TREE_CODE (arg) == FIXED_CST)
2248 {
2249 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2250 if (tem != NULL_TREE)
2251 return tem;
2252 }
2253
2254 switch (TREE_CODE (orig))
2255 {
2256 case INTEGER_TYPE:
2257 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2258 case POINTER_TYPE: case REFERENCE_TYPE:
2259 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2260
2261 case REAL_TYPE:
2262 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2263
2264 case FIXED_POINT_TYPE:
2265 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2266
2267 case COMPLEX_TYPE:
2268 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2269 return fold_convert_loc (loc, type, tem);
2270
2271 default:
2272 gcc_unreachable ();
2273 }
2274
2275 case FIXED_POINT_TYPE:
2276 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2277 || TREE_CODE (arg) == REAL_CST)
2278 {
2279 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2280 if (tem != NULL_TREE)
2281 goto fold_convert_exit;
2282 }
2283
2284 switch (TREE_CODE (orig))
2285 {
2286 case FIXED_POINT_TYPE:
2287 case INTEGER_TYPE:
2288 case ENUMERAL_TYPE:
2289 case BOOLEAN_TYPE:
2290 case REAL_TYPE:
2291 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2292
2293 case COMPLEX_TYPE:
2294 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2295 return fold_convert_loc (loc, type, tem);
2296
2297 default:
2298 gcc_unreachable ();
2299 }
2300
2301 case COMPLEX_TYPE:
2302 switch (TREE_CODE (orig))
2303 {
2304 case INTEGER_TYPE:
2305 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2306 case POINTER_TYPE: case REFERENCE_TYPE:
2307 case REAL_TYPE:
2308 case FIXED_POINT_TYPE:
2309 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2310 fold_convert_loc (loc, TREE_TYPE (type), arg),
2311 fold_convert_loc (loc, TREE_TYPE (type),
2312 integer_zero_node));
2313 case COMPLEX_TYPE:
2314 {
2315 tree rpart, ipart;
2316
2317 if (TREE_CODE (arg) == COMPLEX_EXPR)
2318 {
2319 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2320 TREE_OPERAND (arg, 0));
2321 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2322 TREE_OPERAND (arg, 1));
2323 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2324 }
2325
2326 arg = save_expr (arg);
2327 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2328 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2329 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2330 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2331 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2332 }
2333
2334 default:
2335 gcc_unreachable ();
2336 }
2337
2338 case VECTOR_TYPE:
2339 if (integer_zerop (arg))
2340 return build_zero_vector (type);
2341 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2342 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2343 || TREE_CODE (orig) == VECTOR_TYPE);
2344 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2345
2346 case VOID_TYPE:
2347 tem = fold_ignored_result (arg);
2348 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2349
2350 default:
2351 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2352 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2353 gcc_unreachable ();
2354 }
2355 fold_convert_exit:
2356 protected_set_expr_location_unshare (tem, loc);
2357 return tem;
2358 }
2359 \f
2360 /* Return false if expr can be assumed not to be an lvalue, true
2361 otherwise. */
2362
2363 static bool
2364 maybe_lvalue_p (const_tree x)
2365 {
2366 /* We only need to wrap lvalue tree codes. */
2367 switch (TREE_CODE (x))
2368 {
2369 case VAR_DECL:
2370 case PARM_DECL:
2371 case RESULT_DECL:
2372 case LABEL_DECL:
2373 case FUNCTION_DECL:
2374 case SSA_NAME:
2375
2376 case COMPONENT_REF:
2377 case MEM_REF:
2378 case INDIRECT_REF:
2379 case ARRAY_REF:
2380 case ARRAY_RANGE_REF:
2381 case BIT_FIELD_REF:
2382 case OBJ_TYPE_REF:
2383
2384 case REALPART_EXPR:
2385 case IMAGPART_EXPR:
2386 case PREINCREMENT_EXPR:
2387 case PREDECREMENT_EXPR:
2388 case SAVE_EXPR:
2389 case TRY_CATCH_EXPR:
2390 case WITH_CLEANUP_EXPR:
2391 case COMPOUND_EXPR:
2392 case MODIFY_EXPR:
2393 case TARGET_EXPR:
2394 case COND_EXPR:
2395 case BIND_EXPR:
2396 break;
2397
2398 default:
2399 /* Assume the worst for front-end tree codes. */
2400 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2401 break;
2402 return false;
2403 }
2404
2405 return true;
2406 }
2407
2408 /* Return an expr equal to X but certainly not valid as an lvalue. */
2409
2410 tree
2411 non_lvalue_loc (location_t loc, tree x)
2412 {
2413 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2414 us. */
2415 if (in_gimple_form)
2416 return x;
2417
2418 if (! maybe_lvalue_p (x))
2419 return x;
2420 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2421 }
2422
2423 /* When pedantic, return an expr equal to X but certainly not valid as a
2424 pedantic lvalue. Otherwise, return X. */
2425
2426 static tree
2427 pedantic_non_lvalue_loc (location_t loc, tree x)
2428 {
2429 return protected_set_expr_location_unshare (x, loc);
2430 }
2431 \f
2432 /* Given a tree comparison code, return the code that is the logical inverse.
2433 It is generally not safe to do this for floating-point comparisons, except
2434 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2435 ERROR_MARK in this case. */
2436
2437 enum tree_code
2438 invert_tree_comparison (enum tree_code code, bool honor_nans)
2439 {
2440 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2441 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2442 return ERROR_MARK;
2443
2444 switch (code)
2445 {
2446 case EQ_EXPR:
2447 return NE_EXPR;
2448 case NE_EXPR:
2449 return EQ_EXPR;
2450 case GT_EXPR:
2451 return honor_nans ? UNLE_EXPR : LE_EXPR;
2452 case GE_EXPR:
2453 return honor_nans ? UNLT_EXPR : LT_EXPR;
2454 case LT_EXPR:
2455 return honor_nans ? UNGE_EXPR : GE_EXPR;
2456 case LE_EXPR:
2457 return honor_nans ? UNGT_EXPR : GT_EXPR;
2458 case LTGT_EXPR:
2459 return UNEQ_EXPR;
2460 case UNEQ_EXPR:
2461 return LTGT_EXPR;
2462 case UNGT_EXPR:
2463 return LE_EXPR;
2464 case UNGE_EXPR:
2465 return LT_EXPR;
2466 case UNLT_EXPR:
2467 return GE_EXPR;
2468 case UNLE_EXPR:
2469 return GT_EXPR;
2470 case ORDERED_EXPR:
2471 return UNORDERED_EXPR;
2472 case UNORDERED_EXPR:
2473 return ORDERED_EXPR;
2474 default:
2475 gcc_unreachable ();
2476 }
2477 }
2478
2479 /* Similar, but return the comparison that results if the operands are
2480 swapped. This is safe for floating-point. */
2481
2482 enum tree_code
2483 swap_tree_comparison (enum tree_code code)
2484 {
2485 switch (code)
2486 {
2487 case EQ_EXPR:
2488 case NE_EXPR:
2489 case ORDERED_EXPR:
2490 case UNORDERED_EXPR:
2491 case LTGT_EXPR:
2492 case UNEQ_EXPR:
2493 return code;
2494 case GT_EXPR:
2495 return LT_EXPR;
2496 case GE_EXPR:
2497 return LE_EXPR;
2498 case LT_EXPR:
2499 return GT_EXPR;
2500 case LE_EXPR:
2501 return GE_EXPR;
2502 case UNGT_EXPR:
2503 return UNLT_EXPR;
2504 case UNGE_EXPR:
2505 return UNLE_EXPR;
2506 case UNLT_EXPR:
2507 return UNGT_EXPR;
2508 case UNLE_EXPR:
2509 return UNGE_EXPR;
2510 default:
2511 gcc_unreachable ();
2512 }
2513 }
2514
2515
2516 /* Convert a comparison tree code from an enum tree_code representation
2517 into a compcode bit-based encoding. This function is the inverse of
2518 compcode_to_comparison. */
2519
2520 static enum comparison_code
2521 comparison_to_compcode (enum tree_code code)
2522 {
2523 switch (code)
2524 {
2525 case LT_EXPR:
2526 return COMPCODE_LT;
2527 case EQ_EXPR:
2528 return COMPCODE_EQ;
2529 case LE_EXPR:
2530 return COMPCODE_LE;
2531 case GT_EXPR:
2532 return COMPCODE_GT;
2533 case NE_EXPR:
2534 return COMPCODE_NE;
2535 case GE_EXPR:
2536 return COMPCODE_GE;
2537 case ORDERED_EXPR:
2538 return COMPCODE_ORD;
2539 case UNORDERED_EXPR:
2540 return COMPCODE_UNORD;
2541 case UNLT_EXPR:
2542 return COMPCODE_UNLT;
2543 case UNEQ_EXPR:
2544 return COMPCODE_UNEQ;
2545 case UNLE_EXPR:
2546 return COMPCODE_UNLE;
2547 case UNGT_EXPR:
2548 return COMPCODE_UNGT;
2549 case LTGT_EXPR:
2550 return COMPCODE_LTGT;
2551 case UNGE_EXPR:
2552 return COMPCODE_UNGE;
2553 default:
2554 gcc_unreachable ();
2555 }
2556 }
2557
2558 /* Convert a compcode bit-based encoding of a comparison operator back
2559 to GCC's enum tree_code representation. This function is the
2560 inverse of comparison_to_compcode. */
2561
2562 static enum tree_code
2563 compcode_to_comparison (enum comparison_code code)
2564 {
2565 switch (code)
2566 {
2567 case COMPCODE_LT:
2568 return LT_EXPR;
2569 case COMPCODE_EQ:
2570 return EQ_EXPR;
2571 case COMPCODE_LE:
2572 return LE_EXPR;
2573 case COMPCODE_GT:
2574 return GT_EXPR;
2575 case COMPCODE_NE:
2576 return NE_EXPR;
2577 case COMPCODE_GE:
2578 return GE_EXPR;
2579 case COMPCODE_ORD:
2580 return ORDERED_EXPR;
2581 case COMPCODE_UNORD:
2582 return UNORDERED_EXPR;
2583 case COMPCODE_UNLT:
2584 return UNLT_EXPR;
2585 case COMPCODE_UNEQ:
2586 return UNEQ_EXPR;
2587 case COMPCODE_UNLE:
2588 return UNLE_EXPR;
2589 case COMPCODE_UNGT:
2590 return UNGT_EXPR;
2591 case COMPCODE_LTGT:
2592 return LTGT_EXPR;
2593 case COMPCODE_UNGE:
2594 return UNGE_EXPR;
2595 default:
2596 gcc_unreachable ();
2597 }
2598 }
2599
2600 /* Return a tree for the comparison which is the combination of
2601 doing the AND or OR (depending on CODE) of the two operations LCODE
2602 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2603 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2604 if this makes the transformation invalid. */
2605
2606 tree
2607 combine_comparisons (location_t loc,
2608 enum tree_code code, enum tree_code lcode,
2609 enum tree_code rcode, tree truth_type,
2610 tree ll_arg, tree lr_arg)
2611 {
2612 bool honor_nans = HONOR_NANS (ll_arg);
2613 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2614 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2615 int compcode;
2616
2617 switch (code)
2618 {
2619 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2620 compcode = lcompcode & rcompcode;
2621 break;
2622
2623 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2624 compcode = lcompcode | rcompcode;
2625 break;
2626
2627 default:
2628 return NULL_TREE;
2629 }
2630
2631 if (!honor_nans)
2632 {
2633 /* Eliminate unordered comparisons, as well as LTGT and ORD
2634 which are not used unless the mode has NaNs. */
2635 compcode &= ~COMPCODE_UNORD;
2636 if (compcode == COMPCODE_LTGT)
2637 compcode = COMPCODE_NE;
2638 else if (compcode == COMPCODE_ORD)
2639 compcode = COMPCODE_TRUE;
2640 }
2641 else if (flag_trapping_math)
2642 {
2643 /* Check that the original operation and the optimized ones will trap
2644 under the same condition. */
2645 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2646 && (lcompcode != COMPCODE_EQ)
2647 && (lcompcode != COMPCODE_ORD);
2648 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2649 && (rcompcode != COMPCODE_EQ)
2650 && (rcompcode != COMPCODE_ORD);
2651 bool trap = (compcode & COMPCODE_UNORD) == 0
2652 && (compcode != COMPCODE_EQ)
2653 && (compcode != COMPCODE_ORD);
2654
2655 /* In a short-circuited boolean expression the LHS might be
2656 such that the RHS, if evaluated, will never trap. For
2657 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2658 if neither x nor y is NaN. (This is a mixed blessing: for
2659 example, the expression above will never trap, hence
2660 optimizing it to x < y would be invalid). */
2661 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2662 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2663 rtrap = false;
2664
2665 /* If the comparison was short-circuited, and only the RHS
2666 trapped, we may now generate a spurious trap. */
2667 if (rtrap && !ltrap
2668 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2669 return NULL_TREE;
2670
2671 /* If we changed the conditions that cause a trap, we lose. */
2672 if ((ltrap || rtrap) != trap)
2673 return NULL_TREE;
2674 }
2675
2676 if (compcode == COMPCODE_TRUE)
2677 return constant_boolean_node (true, truth_type);
2678 else if (compcode == COMPCODE_FALSE)
2679 return constant_boolean_node (false, truth_type);
2680 else
2681 {
2682 enum tree_code tcode;
2683
2684 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2685 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2686 }
2687 }
2688 \f
2689 /* Return nonzero if two operands (typically of the same tree node)
2690 are necessarily equal. If either argument has side-effects this
2691 function returns zero. FLAGS modifies behavior as follows:
2692
2693 If OEP_ONLY_CONST is set, only return nonzero for constants.
2694 This function tests whether the operands are indistinguishable;
2695 it does not test whether they are equal using C's == operation.
2696 The distinction is important for IEEE floating point, because
2697 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2698 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2699
2700 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2701 even though it may hold multiple values during a function.
2702 This is because a GCC tree node guarantees that nothing else is
2703 executed between the evaluation of its "operands" (which may often
2704 be evaluated in arbitrary order). Hence if the operands themselves
2705 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2706 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2707 unset means assuming isochronic (or instantaneous) tree equivalence.
2708 Unless comparing arbitrary expression trees, such as from different
2709 statements, this flag can usually be left unset.
2710
2711 If OEP_PURE_SAME is set, then pure functions with identical arguments
2712 are considered the same. It is used when the caller has other ways
2713 to ensure that global memory is unchanged in between. */
2714
2715 int
2716 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2717 {
2718 /* If either is ERROR_MARK, they aren't equal. */
2719 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2720 || TREE_TYPE (arg0) == error_mark_node
2721 || TREE_TYPE (arg1) == error_mark_node)
2722 return 0;
2723
2724 /* Similar, if either does not have a type (like a released SSA name),
2725 they aren't equal. */
2726 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2727 return 0;
2728
2729 /* Check equality of integer constants before bailing out due to
2730 precision differences. */
2731 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2732 return tree_int_cst_equal (arg0, arg1);
2733
2734 /* If both types don't have the same signedness, then we can't consider
2735 them equal. We must check this before the STRIP_NOPS calls
2736 because they may change the signedness of the arguments. As pointers
2737 strictly don't have a signedness, require either two pointers or
2738 two non-pointers as well. */
2739 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2740 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2741 return 0;
2742
2743 /* We cannot consider pointers to different address space equal. */
2744 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2745 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2746 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2747 return 0;
2748
2749 /* If both types don't have the same precision, then it is not safe
2750 to strip NOPs. */
2751 if (element_precision (TREE_TYPE (arg0))
2752 != element_precision (TREE_TYPE (arg1)))
2753 return 0;
2754
2755 STRIP_NOPS (arg0);
2756 STRIP_NOPS (arg1);
2757
2758 /* In case both args are comparisons but with different comparison
2759 code, try to swap the comparison operands of one arg to produce
2760 a match and compare that variant. */
2761 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2762 && COMPARISON_CLASS_P (arg0)
2763 && COMPARISON_CLASS_P (arg1))
2764 {
2765 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2766
2767 if (TREE_CODE (arg0) == swap_code)
2768 return operand_equal_p (TREE_OPERAND (arg0, 0),
2769 TREE_OPERAND (arg1, 1), flags)
2770 && operand_equal_p (TREE_OPERAND (arg0, 1),
2771 TREE_OPERAND (arg1, 0), flags);
2772 }
2773
2774 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2775 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2776 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2777 return 0;
2778
2779 /* This is needed for conversions and for COMPONENT_REF.
2780 Might as well play it safe and always test this. */
2781 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2782 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2783 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2784 return 0;
2785
2786 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2787 We don't care about side effects in that case because the SAVE_EXPR
2788 takes care of that for us. In all other cases, two expressions are
2789 equal if they have no side effects. If we have two identical
2790 expressions with side effects that should be treated the same due
2791 to the only side effects being identical SAVE_EXPR's, that will
2792 be detected in the recursive calls below.
2793 If we are taking an invariant address of two identical objects
2794 they are necessarily equal as well. */
2795 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2796 && (TREE_CODE (arg0) == SAVE_EXPR
2797 || (flags & OEP_CONSTANT_ADDRESS_OF)
2798 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2799 return 1;
2800
2801 /* Next handle constant cases, those for which we can return 1 even
2802 if ONLY_CONST is set. */
2803 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2804 switch (TREE_CODE (arg0))
2805 {
2806 case INTEGER_CST:
2807 return tree_int_cst_equal (arg0, arg1);
2808
2809 case FIXED_CST:
2810 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2811 TREE_FIXED_CST (arg1));
2812
2813 case REAL_CST:
2814 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2815 TREE_REAL_CST (arg1)))
2816 return 1;
2817
2818
2819 if (!HONOR_SIGNED_ZEROS (arg0))
2820 {
2821 /* If we do not distinguish between signed and unsigned zero,
2822 consider them equal. */
2823 if (real_zerop (arg0) && real_zerop (arg1))
2824 return 1;
2825 }
2826 return 0;
2827
2828 case VECTOR_CST:
2829 {
2830 unsigned i;
2831
2832 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2833 return 0;
2834
2835 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2836 {
2837 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2838 VECTOR_CST_ELT (arg1, i), flags))
2839 return 0;
2840 }
2841 return 1;
2842 }
2843
2844 case COMPLEX_CST:
2845 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2846 flags)
2847 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2848 flags));
2849
2850 case STRING_CST:
2851 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2852 && ! memcmp (TREE_STRING_POINTER (arg0),
2853 TREE_STRING_POINTER (arg1),
2854 TREE_STRING_LENGTH (arg0)));
2855
2856 case ADDR_EXPR:
2857 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2858 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2859 ? OEP_CONSTANT_ADDRESS_OF : 0);
2860 default:
2861 break;
2862 }
2863
2864 if (flags & OEP_ONLY_CONST)
2865 return 0;
2866
2867 /* Define macros to test an operand from arg0 and arg1 for equality and a
2868 variant that allows null and views null as being different from any
2869 non-null value. In the latter case, if either is null, the both
2870 must be; otherwise, do the normal comparison. */
2871 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2872 TREE_OPERAND (arg1, N), flags)
2873
2874 #define OP_SAME_WITH_NULL(N) \
2875 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2876 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2877
2878 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2879 {
2880 case tcc_unary:
2881 /* Two conversions are equal only if signedness and modes match. */
2882 switch (TREE_CODE (arg0))
2883 {
2884 CASE_CONVERT:
2885 case FIX_TRUNC_EXPR:
2886 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2887 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2888 return 0;
2889 break;
2890 default:
2891 break;
2892 }
2893
2894 return OP_SAME (0);
2895
2896
2897 case tcc_comparison:
2898 case tcc_binary:
2899 if (OP_SAME (0) && OP_SAME (1))
2900 return 1;
2901
2902 /* For commutative ops, allow the other order. */
2903 return (commutative_tree_code (TREE_CODE (arg0))
2904 && operand_equal_p (TREE_OPERAND (arg0, 0),
2905 TREE_OPERAND (arg1, 1), flags)
2906 && operand_equal_p (TREE_OPERAND (arg0, 1),
2907 TREE_OPERAND (arg1, 0), flags));
2908
2909 case tcc_reference:
2910 /* If either of the pointer (or reference) expressions we are
2911 dereferencing contain a side effect, these cannot be equal,
2912 but their addresses can be. */
2913 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2914 && (TREE_SIDE_EFFECTS (arg0)
2915 || TREE_SIDE_EFFECTS (arg1)))
2916 return 0;
2917
2918 switch (TREE_CODE (arg0))
2919 {
2920 case INDIRECT_REF:
2921 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2922 return OP_SAME (0);
2923
2924 case REALPART_EXPR:
2925 case IMAGPART_EXPR:
2926 return OP_SAME (0);
2927
2928 case TARGET_MEM_REF:
2929 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2930 /* Require equal extra operands and then fall through to MEM_REF
2931 handling of the two common operands. */
2932 if (!OP_SAME_WITH_NULL (2)
2933 || !OP_SAME_WITH_NULL (3)
2934 || !OP_SAME_WITH_NULL (4))
2935 return 0;
2936 /* Fallthru. */
2937 case MEM_REF:
2938 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2939 /* Require equal access sizes, and similar pointer types.
2940 We can have incomplete types for array references of
2941 variable-sized arrays from the Fortran frontend
2942 though. Also verify the types are compatible. */
2943 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2944 || (TYPE_SIZE (TREE_TYPE (arg0))
2945 && TYPE_SIZE (TREE_TYPE (arg1))
2946 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2947 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2948 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2949 && alias_ptr_types_compatible_p
2950 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2951 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2952 && OP_SAME (0) && OP_SAME (1));
2953
2954 case ARRAY_REF:
2955 case ARRAY_RANGE_REF:
2956 /* Operands 2 and 3 may be null.
2957 Compare the array index by value if it is constant first as we
2958 may have different types but same value here. */
2959 if (!OP_SAME (0))
2960 return 0;
2961 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2962 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2963 TREE_OPERAND (arg1, 1))
2964 || OP_SAME (1))
2965 && OP_SAME_WITH_NULL (2)
2966 && OP_SAME_WITH_NULL (3));
2967
2968 case COMPONENT_REF:
2969 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2970 may be NULL when we're called to compare MEM_EXPRs. */
2971 if (!OP_SAME_WITH_NULL (0)
2972 || !OP_SAME (1))
2973 return 0;
2974 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2975 return OP_SAME_WITH_NULL (2);
2976
2977 case BIT_FIELD_REF:
2978 if (!OP_SAME (0))
2979 return 0;
2980 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2981 return OP_SAME (1) && OP_SAME (2);
2982
2983 default:
2984 return 0;
2985 }
2986
2987 case tcc_expression:
2988 switch (TREE_CODE (arg0))
2989 {
2990 case ADDR_EXPR:
2991 case TRUTH_NOT_EXPR:
2992 return OP_SAME (0);
2993
2994 case TRUTH_ANDIF_EXPR:
2995 case TRUTH_ORIF_EXPR:
2996 return OP_SAME (0) && OP_SAME (1);
2997
2998 case FMA_EXPR:
2999 case WIDEN_MULT_PLUS_EXPR:
3000 case WIDEN_MULT_MINUS_EXPR:
3001 if (!OP_SAME (2))
3002 return 0;
3003 /* The multiplcation operands are commutative. */
3004 /* FALLTHRU */
3005
3006 case TRUTH_AND_EXPR:
3007 case TRUTH_OR_EXPR:
3008 case TRUTH_XOR_EXPR:
3009 if (OP_SAME (0) && OP_SAME (1))
3010 return 1;
3011
3012 /* Otherwise take into account this is a commutative operation. */
3013 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3014 TREE_OPERAND (arg1, 1), flags)
3015 && operand_equal_p (TREE_OPERAND (arg0, 1),
3016 TREE_OPERAND (arg1, 0), flags));
3017
3018 case COND_EXPR:
3019 case VEC_COND_EXPR:
3020 case DOT_PROD_EXPR:
3021 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3022
3023 default:
3024 return 0;
3025 }
3026
3027 case tcc_vl_exp:
3028 switch (TREE_CODE (arg0))
3029 {
3030 case CALL_EXPR:
3031 /* If the CALL_EXPRs call different functions, then they
3032 clearly can not be equal. */
3033 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3034 flags))
3035 return 0;
3036
3037 {
3038 unsigned int cef = call_expr_flags (arg0);
3039 if (flags & OEP_PURE_SAME)
3040 cef &= ECF_CONST | ECF_PURE;
3041 else
3042 cef &= ECF_CONST;
3043 if (!cef)
3044 return 0;
3045 }
3046
3047 /* Now see if all the arguments are the same. */
3048 {
3049 const_call_expr_arg_iterator iter0, iter1;
3050 const_tree a0, a1;
3051 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3052 a1 = first_const_call_expr_arg (arg1, &iter1);
3053 a0 && a1;
3054 a0 = next_const_call_expr_arg (&iter0),
3055 a1 = next_const_call_expr_arg (&iter1))
3056 if (! operand_equal_p (a0, a1, flags))
3057 return 0;
3058
3059 /* If we get here and both argument lists are exhausted
3060 then the CALL_EXPRs are equal. */
3061 return ! (a0 || a1);
3062 }
3063 default:
3064 return 0;
3065 }
3066
3067 case tcc_declaration:
3068 /* Consider __builtin_sqrt equal to sqrt. */
3069 return (TREE_CODE (arg0) == FUNCTION_DECL
3070 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3071 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3072 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3073
3074 default:
3075 return 0;
3076 }
3077
3078 #undef OP_SAME
3079 #undef OP_SAME_WITH_NULL
3080 }
3081 \f
3082 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3083 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3084
3085 When in doubt, return 0. */
3086
3087 static int
3088 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3089 {
3090 int unsignedp1, unsignedpo;
3091 tree primarg0, primarg1, primother;
3092 unsigned int correct_width;
3093
3094 if (operand_equal_p (arg0, arg1, 0))
3095 return 1;
3096
3097 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3098 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3099 return 0;
3100
3101 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3102 and see if the inner values are the same. This removes any
3103 signedness comparison, which doesn't matter here. */
3104 primarg0 = arg0, primarg1 = arg1;
3105 STRIP_NOPS (primarg0);
3106 STRIP_NOPS (primarg1);
3107 if (operand_equal_p (primarg0, primarg1, 0))
3108 return 1;
3109
3110 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3111 actual comparison operand, ARG0.
3112
3113 First throw away any conversions to wider types
3114 already present in the operands. */
3115
3116 primarg1 = get_narrower (arg1, &unsignedp1);
3117 primother = get_narrower (other, &unsignedpo);
3118
3119 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3120 if (unsignedp1 == unsignedpo
3121 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3122 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3123 {
3124 tree type = TREE_TYPE (arg0);
3125
3126 /* Make sure shorter operand is extended the right way
3127 to match the longer operand. */
3128 primarg1 = fold_convert (signed_or_unsigned_type_for
3129 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3130
3131 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3132 return 1;
3133 }
3134
3135 return 0;
3136 }
3137 \f
3138 /* See if ARG is an expression that is either a comparison or is performing
3139 arithmetic on comparisons. The comparisons must only be comparing
3140 two different values, which will be stored in *CVAL1 and *CVAL2; if
3141 they are nonzero it means that some operands have already been found.
3142 No variables may be used anywhere else in the expression except in the
3143 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3144 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3145
3146 If this is true, return 1. Otherwise, return zero. */
3147
3148 static int
3149 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3150 {
3151 enum tree_code code = TREE_CODE (arg);
3152 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3153
3154 /* We can handle some of the tcc_expression cases here. */
3155 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3156 tclass = tcc_unary;
3157 else if (tclass == tcc_expression
3158 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3159 || code == COMPOUND_EXPR))
3160 tclass = tcc_binary;
3161
3162 else if (tclass == tcc_expression && code == SAVE_EXPR
3163 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3164 {
3165 /* If we've already found a CVAL1 or CVAL2, this expression is
3166 two complex to handle. */
3167 if (*cval1 || *cval2)
3168 return 0;
3169
3170 tclass = tcc_unary;
3171 *save_p = 1;
3172 }
3173
3174 switch (tclass)
3175 {
3176 case tcc_unary:
3177 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3178
3179 case tcc_binary:
3180 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3181 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3182 cval1, cval2, save_p));
3183
3184 case tcc_constant:
3185 return 1;
3186
3187 case tcc_expression:
3188 if (code == COND_EXPR)
3189 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3190 cval1, cval2, save_p)
3191 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3192 cval1, cval2, save_p)
3193 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3194 cval1, cval2, save_p));
3195 return 0;
3196
3197 case tcc_comparison:
3198 /* First see if we can handle the first operand, then the second. For
3199 the second operand, we know *CVAL1 can't be zero. It must be that
3200 one side of the comparison is each of the values; test for the
3201 case where this isn't true by failing if the two operands
3202 are the same. */
3203
3204 if (operand_equal_p (TREE_OPERAND (arg, 0),
3205 TREE_OPERAND (arg, 1), 0))
3206 return 0;
3207
3208 if (*cval1 == 0)
3209 *cval1 = TREE_OPERAND (arg, 0);
3210 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3211 ;
3212 else if (*cval2 == 0)
3213 *cval2 = TREE_OPERAND (arg, 0);
3214 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3215 ;
3216 else
3217 return 0;
3218
3219 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3220 ;
3221 else if (*cval2 == 0)
3222 *cval2 = TREE_OPERAND (arg, 1);
3223 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3224 ;
3225 else
3226 return 0;
3227
3228 return 1;
3229
3230 default:
3231 return 0;
3232 }
3233 }
3234 \f
3235 /* ARG is a tree that is known to contain just arithmetic operations and
3236 comparisons. Evaluate the operations in the tree substituting NEW0 for
3237 any occurrence of OLD0 as an operand of a comparison and likewise for
3238 NEW1 and OLD1. */
3239
3240 static tree
3241 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3242 tree old1, tree new1)
3243 {
3244 tree type = TREE_TYPE (arg);
3245 enum tree_code code = TREE_CODE (arg);
3246 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3247
3248 /* We can handle some of the tcc_expression cases here. */
3249 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3250 tclass = tcc_unary;
3251 else if (tclass == tcc_expression
3252 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3253 tclass = tcc_binary;
3254
3255 switch (tclass)
3256 {
3257 case tcc_unary:
3258 return fold_build1_loc (loc, code, type,
3259 eval_subst (loc, TREE_OPERAND (arg, 0),
3260 old0, new0, old1, new1));
3261
3262 case tcc_binary:
3263 return fold_build2_loc (loc, code, type,
3264 eval_subst (loc, TREE_OPERAND (arg, 0),
3265 old0, new0, old1, new1),
3266 eval_subst (loc, TREE_OPERAND (arg, 1),
3267 old0, new0, old1, new1));
3268
3269 case tcc_expression:
3270 switch (code)
3271 {
3272 case SAVE_EXPR:
3273 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3274 old1, new1);
3275
3276 case COMPOUND_EXPR:
3277 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3278 old1, new1);
3279
3280 case COND_EXPR:
3281 return fold_build3_loc (loc, code, type,
3282 eval_subst (loc, TREE_OPERAND (arg, 0),
3283 old0, new0, old1, new1),
3284 eval_subst (loc, TREE_OPERAND (arg, 1),
3285 old0, new0, old1, new1),
3286 eval_subst (loc, TREE_OPERAND (arg, 2),
3287 old0, new0, old1, new1));
3288 default:
3289 break;
3290 }
3291 /* Fall through - ??? */
3292
3293 case tcc_comparison:
3294 {
3295 tree arg0 = TREE_OPERAND (arg, 0);
3296 tree arg1 = TREE_OPERAND (arg, 1);
3297
3298 /* We need to check both for exact equality and tree equality. The
3299 former will be true if the operand has a side-effect. In that
3300 case, we know the operand occurred exactly once. */
3301
3302 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3303 arg0 = new0;
3304 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3305 arg0 = new1;
3306
3307 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3308 arg1 = new0;
3309 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3310 arg1 = new1;
3311
3312 return fold_build2_loc (loc, code, type, arg0, arg1);
3313 }
3314
3315 default:
3316 return arg;
3317 }
3318 }
3319 \f
3320 /* Return a tree for the case when the result of an expression is RESULT
3321 converted to TYPE and OMITTED was previously an operand of the expression
3322 but is now not needed (e.g., we folded OMITTED * 0).
3323
3324 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3325 the conversion of RESULT to TYPE. */
3326
3327 tree
3328 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3329 {
3330 tree t = fold_convert_loc (loc, type, result);
3331
3332 /* If the resulting operand is an empty statement, just return the omitted
3333 statement casted to void. */
3334 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3335 return build1_loc (loc, NOP_EXPR, void_type_node,
3336 fold_ignored_result (omitted));
3337
3338 if (TREE_SIDE_EFFECTS (omitted))
3339 return build2_loc (loc, COMPOUND_EXPR, type,
3340 fold_ignored_result (omitted), t);
3341
3342 return non_lvalue_loc (loc, t);
3343 }
3344
3345 /* Return a tree for the case when the result of an expression is RESULT
3346 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3347 of the expression but are now not needed.
3348
3349 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3350 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3351 evaluated before OMITTED2. Otherwise, if neither has side effects,
3352 just do the conversion of RESULT to TYPE. */
3353
3354 tree
3355 omit_two_operands_loc (location_t loc, tree type, tree result,
3356 tree omitted1, tree omitted2)
3357 {
3358 tree t = fold_convert_loc (loc, type, result);
3359
3360 if (TREE_SIDE_EFFECTS (omitted2))
3361 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3362 if (TREE_SIDE_EFFECTS (omitted1))
3363 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3364
3365 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3366 }
3367
3368 \f
3369 /* Return a simplified tree node for the truth-negation of ARG. This
3370 never alters ARG itself. We assume that ARG is an operation that
3371 returns a truth value (0 or 1).
3372
3373 FIXME: one would think we would fold the result, but it causes
3374 problems with the dominator optimizer. */
3375
3376 static tree
3377 fold_truth_not_expr (location_t loc, tree arg)
3378 {
3379 tree type = TREE_TYPE (arg);
3380 enum tree_code code = TREE_CODE (arg);
3381 location_t loc1, loc2;
3382
3383 /* If this is a comparison, we can simply invert it, except for
3384 floating-point non-equality comparisons, in which case we just
3385 enclose a TRUTH_NOT_EXPR around what we have. */
3386
3387 if (TREE_CODE_CLASS (code) == tcc_comparison)
3388 {
3389 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3390 if (FLOAT_TYPE_P (op_type)
3391 && flag_trapping_math
3392 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3393 && code != NE_EXPR && code != EQ_EXPR)
3394 return NULL_TREE;
3395
3396 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3397 if (code == ERROR_MARK)
3398 return NULL_TREE;
3399
3400 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3401 TREE_OPERAND (arg, 1));
3402 }
3403
3404 switch (code)
3405 {
3406 case INTEGER_CST:
3407 return constant_boolean_node (integer_zerop (arg), type);
3408
3409 case TRUTH_AND_EXPR:
3410 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3411 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3412 return build2_loc (loc, TRUTH_OR_EXPR, type,
3413 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3414 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3415
3416 case TRUTH_OR_EXPR:
3417 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3418 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3419 return build2_loc (loc, TRUTH_AND_EXPR, type,
3420 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3421 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3422
3423 case TRUTH_XOR_EXPR:
3424 /* Here we can invert either operand. We invert the first operand
3425 unless the second operand is a TRUTH_NOT_EXPR in which case our
3426 result is the XOR of the first operand with the inside of the
3427 negation of the second operand. */
3428
3429 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3430 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3431 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3432 else
3433 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3434 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3435 TREE_OPERAND (arg, 1));
3436
3437 case TRUTH_ANDIF_EXPR:
3438 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3439 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3440 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3441 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3442 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3443
3444 case TRUTH_ORIF_EXPR:
3445 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3446 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3447 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3448 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3449 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3450
3451 case TRUTH_NOT_EXPR:
3452 return TREE_OPERAND (arg, 0);
3453
3454 case COND_EXPR:
3455 {
3456 tree arg1 = TREE_OPERAND (arg, 1);
3457 tree arg2 = TREE_OPERAND (arg, 2);
3458
3459 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3460 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3461
3462 /* A COND_EXPR may have a throw as one operand, which
3463 then has void type. Just leave void operands
3464 as they are. */
3465 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3466 VOID_TYPE_P (TREE_TYPE (arg1))
3467 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3468 VOID_TYPE_P (TREE_TYPE (arg2))
3469 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3470 }
3471
3472 case COMPOUND_EXPR:
3473 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3474 return build2_loc (loc, COMPOUND_EXPR, type,
3475 TREE_OPERAND (arg, 0),
3476 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3477
3478 case NON_LVALUE_EXPR:
3479 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3480 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3481
3482 CASE_CONVERT:
3483 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3484 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3485
3486 /* ... fall through ... */
3487
3488 case FLOAT_EXPR:
3489 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3490 return build1_loc (loc, TREE_CODE (arg), type,
3491 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3492
3493 case BIT_AND_EXPR:
3494 if (!integer_onep (TREE_OPERAND (arg, 1)))
3495 return NULL_TREE;
3496 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3497
3498 case SAVE_EXPR:
3499 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3500
3501 case CLEANUP_POINT_EXPR:
3502 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3503 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3504 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3505
3506 default:
3507 return NULL_TREE;
3508 }
3509 }
3510
3511 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3512 assume that ARG is an operation that returns a truth value (0 or 1
3513 for scalars, 0 or -1 for vectors). Return the folded expression if
3514 folding is successful. Otherwise, return NULL_TREE. */
3515
3516 static tree
3517 fold_invert_truthvalue (location_t loc, tree arg)
3518 {
3519 tree type = TREE_TYPE (arg);
3520 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3521 ? BIT_NOT_EXPR
3522 : TRUTH_NOT_EXPR,
3523 type, arg);
3524 }
3525
3526 /* Return a simplified tree node for the truth-negation of ARG. This
3527 never alters ARG itself. We assume that ARG is an operation that
3528 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3529
3530 tree
3531 invert_truthvalue_loc (location_t loc, tree arg)
3532 {
3533 if (TREE_CODE (arg) == ERROR_MARK)
3534 return arg;
3535
3536 tree type = TREE_TYPE (arg);
3537 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3538 ? BIT_NOT_EXPR
3539 : TRUTH_NOT_EXPR,
3540 type, arg);
3541 }
3542
3543 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3544 operands are another bit-wise operation with a common input. If so,
3545 distribute the bit operations to save an operation and possibly two if
3546 constants are involved. For example, convert
3547 (A | B) & (A | C) into A | (B & C)
3548 Further simplification will occur if B and C are constants.
3549
3550 If this optimization cannot be done, 0 will be returned. */
3551
3552 static tree
3553 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3554 tree arg0, tree arg1)
3555 {
3556 tree common;
3557 tree left, right;
3558
3559 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3560 || TREE_CODE (arg0) == code
3561 || (TREE_CODE (arg0) != BIT_AND_EXPR
3562 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3563 return 0;
3564
3565 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3566 {
3567 common = TREE_OPERAND (arg0, 0);
3568 left = TREE_OPERAND (arg0, 1);
3569 right = TREE_OPERAND (arg1, 1);
3570 }
3571 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3572 {
3573 common = TREE_OPERAND (arg0, 0);
3574 left = TREE_OPERAND (arg0, 1);
3575 right = TREE_OPERAND (arg1, 0);
3576 }
3577 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3578 {
3579 common = TREE_OPERAND (arg0, 1);
3580 left = TREE_OPERAND (arg0, 0);
3581 right = TREE_OPERAND (arg1, 1);
3582 }
3583 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3584 {
3585 common = TREE_OPERAND (arg0, 1);
3586 left = TREE_OPERAND (arg0, 0);
3587 right = TREE_OPERAND (arg1, 0);
3588 }
3589 else
3590 return 0;
3591
3592 common = fold_convert_loc (loc, type, common);
3593 left = fold_convert_loc (loc, type, left);
3594 right = fold_convert_loc (loc, type, right);
3595 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3596 fold_build2_loc (loc, code, type, left, right));
3597 }
3598
3599 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3600 with code CODE. This optimization is unsafe. */
3601 static tree
3602 distribute_real_division (location_t loc, enum tree_code code, tree type,
3603 tree arg0, tree arg1)
3604 {
3605 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3606 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3607
3608 /* (A / C) +- (B / C) -> (A +- B) / C. */
3609 if (mul0 == mul1
3610 && operand_equal_p (TREE_OPERAND (arg0, 1),
3611 TREE_OPERAND (arg1, 1), 0))
3612 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3613 fold_build2_loc (loc, code, type,
3614 TREE_OPERAND (arg0, 0),
3615 TREE_OPERAND (arg1, 0)),
3616 TREE_OPERAND (arg0, 1));
3617
3618 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3619 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3620 TREE_OPERAND (arg1, 0), 0)
3621 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3622 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3623 {
3624 REAL_VALUE_TYPE r0, r1;
3625 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3626 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3627 if (!mul0)
3628 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3629 if (!mul1)
3630 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3631 real_arithmetic (&r0, code, &r0, &r1);
3632 return fold_build2_loc (loc, MULT_EXPR, type,
3633 TREE_OPERAND (arg0, 0),
3634 build_real (type, r0));
3635 }
3636
3637 return NULL_TREE;
3638 }
3639 \f
3640 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3641 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3642
3643 static tree
3644 make_bit_field_ref (location_t loc, tree inner, tree type,
3645 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3646 {
3647 tree result, bftype;
3648
3649 if (bitpos == 0)
3650 {
3651 tree size = TYPE_SIZE (TREE_TYPE (inner));
3652 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3653 || POINTER_TYPE_P (TREE_TYPE (inner)))
3654 && tree_fits_shwi_p (size)
3655 && tree_to_shwi (size) == bitsize)
3656 return fold_convert_loc (loc, type, inner);
3657 }
3658
3659 bftype = type;
3660 if (TYPE_PRECISION (bftype) != bitsize
3661 || TYPE_UNSIGNED (bftype) == !unsignedp)
3662 bftype = build_nonstandard_integer_type (bitsize, 0);
3663
3664 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3665 size_int (bitsize), bitsize_int (bitpos));
3666
3667 if (bftype != type)
3668 result = fold_convert_loc (loc, type, result);
3669
3670 return result;
3671 }
3672
3673 /* Optimize a bit-field compare.
3674
3675 There are two cases: First is a compare against a constant and the
3676 second is a comparison of two items where the fields are at the same
3677 bit position relative to the start of a chunk (byte, halfword, word)
3678 large enough to contain it. In these cases we can avoid the shift
3679 implicit in bitfield extractions.
3680
3681 For constants, we emit a compare of the shifted constant with the
3682 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3683 compared. For two fields at the same position, we do the ANDs with the
3684 similar mask and compare the result of the ANDs.
3685
3686 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3687 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3688 are the left and right operands of the comparison, respectively.
3689
3690 If the optimization described above can be done, we return the resulting
3691 tree. Otherwise we return zero. */
3692
3693 static tree
3694 optimize_bit_field_compare (location_t loc, enum tree_code code,
3695 tree compare_type, tree lhs, tree rhs)
3696 {
3697 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3698 tree type = TREE_TYPE (lhs);
3699 tree unsigned_type;
3700 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3701 machine_mode lmode, rmode, nmode;
3702 int lunsignedp, runsignedp;
3703 int lvolatilep = 0, rvolatilep = 0;
3704 tree linner, rinner = NULL_TREE;
3705 tree mask;
3706 tree offset;
3707
3708 /* Get all the information about the extractions being done. If the bit size
3709 if the same as the size of the underlying object, we aren't doing an
3710 extraction at all and so can do nothing. We also don't want to
3711 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3712 then will no longer be able to replace it. */
3713 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3714 &lunsignedp, &lvolatilep, false);
3715 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3716 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3717 return 0;
3718
3719 if (!const_p)
3720 {
3721 /* If this is not a constant, we can only do something if bit positions,
3722 sizes, and signedness are the same. */
3723 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3724 &runsignedp, &rvolatilep, false);
3725
3726 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3727 || lunsignedp != runsignedp || offset != 0
3728 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3729 return 0;
3730 }
3731
3732 /* See if we can find a mode to refer to this field. We should be able to,
3733 but fail if we can't. */
3734 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3735 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3736 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3737 TYPE_ALIGN (TREE_TYPE (rinner))),
3738 word_mode, false);
3739 if (nmode == VOIDmode)
3740 return 0;
3741
3742 /* Set signed and unsigned types of the precision of this mode for the
3743 shifts below. */
3744 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3745
3746 /* Compute the bit position and size for the new reference and our offset
3747 within it. If the new reference is the same size as the original, we
3748 won't optimize anything, so return zero. */
3749 nbitsize = GET_MODE_BITSIZE (nmode);
3750 nbitpos = lbitpos & ~ (nbitsize - 1);
3751 lbitpos -= nbitpos;
3752 if (nbitsize == lbitsize)
3753 return 0;
3754
3755 if (BYTES_BIG_ENDIAN)
3756 lbitpos = nbitsize - lbitsize - lbitpos;
3757
3758 /* Make the mask to be used against the extracted field. */
3759 mask = build_int_cst_type (unsigned_type, -1);
3760 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3761 mask = const_binop (RSHIFT_EXPR, mask,
3762 size_int (nbitsize - lbitsize - lbitpos));
3763
3764 if (! const_p)
3765 /* If not comparing with constant, just rework the comparison
3766 and return. */
3767 return fold_build2_loc (loc, code, compare_type,
3768 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3769 make_bit_field_ref (loc, linner,
3770 unsigned_type,
3771 nbitsize, nbitpos,
3772 1),
3773 mask),
3774 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3775 make_bit_field_ref (loc, rinner,
3776 unsigned_type,
3777 nbitsize, nbitpos,
3778 1),
3779 mask));
3780
3781 /* Otherwise, we are handling the constant case. See if the constant is too
3782 big for the field. Warn and return a tree of for 0 (false) if so. We do
3783 this not only for its own sake, but to avoid having to test for this
3784 error case below. If we didn't, we might generate wrong code.
3785
3786 For unsigned fields, the constant shifted right by the field length should
3787 be all zero. For signed fields, the high-order bits should agree with
3788 the sign bit. */
3789
3790 if (lunsignedp)
3791 {
3792 if (wi::lrshift (rhs, lbitsize) != 0)
3793 {
3794 warning (0, "comparison is always %d due to width of bit-field",
3795 code == NE_EXPR);
3796 return constant_boolean_node (code == NE_EXPR, compare_type);
3797 }
3798 }
3799 else
3800 {
3801 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3802 if (tem != 0 && tem != -1)
3803 {
3804 warning (0, "comparison is always %d due to width of bit-field",
3805 code == NE_EXPR);
3806 return constant_boolean_node (code == NE_EXPR, compare_type);
3807 }
3808 }
3809
3810 /* Single-bit compares should always be against zero. */
3811 if (lbitsize == 1 && ! integer_zerop (rhs))
3812 {
3813 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3814 rhs = build_int_cst (type, 0);
3815 }
3816
3817 /* Make a new bitfield reference, shift the constant over the
3818 appropriate number of bits and mask it with the computed mask
3819 (in case this was a signed field). If we changed it, make a new one. */
3820 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3821
3822 rhs = const_binop (BIT_AND_EXPR,
3823 const_binop (LSHIFT_EXPR,
3824 fold_convert_loc (loc, unsigned_type, rhs),
3825 size_int (lbitpos)),
3826 mask);
3827
3828 lhs = build2_loc (loc, code, compare_type,
3829 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3830 return lhs;
3831 }
3832 \f
3833 /* Subroutine for fold_truth_andor_1: decode a field reference.
3834
3835 If EXP is a comparison reference, we return the innermost reference.
3836
3837 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3838 set to the starting bit number.
3839
3840 If the innermost field can be completely contained in a mode-sized
3841 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3842
3843 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3844 otherwise it is not changed.
3845
3846 *PUNSIGNEDP is set to the signedness of the field.
3847
3848 *PMASK is set to the mask used. This is either contained in a
3849 BIT_AND_EXPR or derived from the width of the field.
3850
3851 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3852
3853 Return 0 if this is not a component reference or is one that we can't
3854 do anything with. */
3855
3856 static tree
3857 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3858 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3859 int *punsignedp, int *pvolatilep,
3860 tree *pmask, tree *pand_mask)
3861 {
3862 tree outer_type = 0;
3863 tree and_mask = 0;
3864 tree mask, inner, offset;
3865 tree unsigned_type;
3866 unsigned int precision;
3867
3868 /* All the optimizations using this function assume integer fields.
3869 There are problems with FP fields since the type_for_size call
3870 below can fail for, e.g., XFmode. */
3871 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3872 return 0;
3873
3874 /* We are interested in the bare arrangement of bits, so strip everything
3875 that doesn't affect the machine mode. However, record the type of the
3876 outermost expression if it may matter below. */
3877 if (CONVERT_EXPR_P (exp)
3878 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3879 outer_type = TREE_TYPE (exp);
3880 STRIP_NOPS (exp);
3881
3882 if (TREE_CODE (exp) == BIT_AND_EXPR)
3883 {
3884 and_mask = TREE_OPERAND (exp, 1);
3885 exp = TREE_OPERAND (exp, 0);
3886 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3887 if (TREE_CODE (and_mask) != INTEGER_CST)
3888 return 0;
3889 }
3890
3891 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3892 punsignedp, pvolatilep, false);
3893 if ((inner == exp && and_mask == 0)
3894 || *pbitsize < 0 || offset != 0
3895 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3896 return 0;
3897
3898 /* If the number of bits in the reference is the same as the bitsize of
3899 the outer type, then the outer type gives the signedness. Otherwise
3900 (in case of a small bitfield) the signedness is unchanged. */
3901 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3902 *punsignedp = TYPE_UNSIGNED (outer_type);
3903
3904 /* Compute the mask to access the bitfield. */
3905 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3906 precision = TYPE_PRECISION (unsigned_type);
3907
3908 mask = build_int_cst_type (unsigned_type, -1);
3909
3910 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3911 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3912
3913 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3914 if (and_mask != 0)
3915 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3916 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3917
3918 *pmask = mask;
3919 *pand_mask = and_mask;
3920 return inner;
3921 }
3922
3923 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3924 bit positions and MASK is SIGNED. */
3925
3926 static int
3927 all_ones_mask_p (const_tree mask, unsigned int size)
3928 {
3929 tree type = TREE_TYPE (mask);
3930 unsigned int precision = TYPE_PRECISION (type);
3931
3932 /* If this function returns true when the type of the mask is
3933 UNSIGNED, then there will be errors. In particular see
3934 gcc.c-torture/execute/990326-1.c. There does not appear to be
3935 any documentation paper trail as to why this is so. But the pre
3936 wide-int worked with that restriction and it has been preserved
3937 here. */
3938 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3939 return false;
3940
3941 return wi::mask (size, false, precision) == mask;
3942 }
3943
3944 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3945 represents the sign bit of EXP's type. If EXP represents a sign
3946 or zero extension, also test VAL against the unextended type.
3947 The return value is the (sub)expression whose sign bit is VAL,
3948 or NULL_TREE otherwise. */
3949
3950 tree
3951 sign_bit_p (tree exp, const_tree val)
3952 {
3953 int width;
3954 tree t;
3955
3956 /* Tree EXP must have an integral type. */
3957 t = TREE_TYPE (exp);
3958 if (! INTEGRAL_TYPE_P (t))
3959 return NULL_TREE;
3960
3961 /* Tree VAL must be an integer constant. */
3962 if (TREE_CODE (val) != INTEGER_CST
3963 || TREE_OVERFLOW (val))
3964 return NULL_TREE;
3965
3966 width = TYPE_PRECISION (t);
3967 if (wi::only_sign_bit_p (val, width))
3968 return exp;
3969
3970 /* Handle extension from a narrower type. */
3971 if (TREE_CODE (exp) == NOP_EXPR
3972 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3973 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3974
3975 return NULL_TREE;
3976 }
3977
3978 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3979 to be evaluated unconditionally. */
3980
3981 static int
3982 simple_operand_p (const_tree exp)
3983 {
3984 /* Strip any conversions that don't change the machine mode. */
3985 STRIP_NOPS (exp);
3986
3987 return (CONSTANT_CLASS_P (exp)
3988 || TREE_CODE (exp) == SSA_NAME
3989 || (DECL_P (exp)
3990 && ! TREE_ADDRESSABLE (exp)
3991 && ! TREE_THIS_VOLATILE (exp)
3992 && ! DECL_NONLOCAL (exp)
3993 /* Don't regard global variables as simple. They may be
3994 allocated in ways unknown to the compiler (shared memory,
3995 #pragma weak, etc). */
3996 && ! TREE_PUBLIC (exp)
3997 && ! DECL_EXTERNAL (exp)
3998 /* Weakrefs are not safe to be read, since they can be NULL.
3999 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4000 have DECL_WEAK flag set. */
4001 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4002 /* Loading a static variable is unduly expensive, but global
4003 registers aren't expensive. */
4004 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4005 }
4006
4007 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4008 to be evaluated unconditionally.
4009 I addition to simple_operand_p, we assume that comparisons, conversions,
4010 and logic-not operations are simple, if their operands are simple, too. */
4011
4012 static bool
4013 simple_operand_p_2 (tree exp)
4014 {
4015 enum tree_code code;
4016
4017 if (TREE_SIDE_EFFECTS (exp)
4018 || tree_could_trap_p (exp))
4019 return false;
4020
4021 while (CONVERT_EXPR_P (exp))
4022 exp = TREE_OPERAND (exp, 0);
4023
4024 code = TREE_CODE (exp);
4025
4026 if (TREE_CODE_CLASS (code) == tcc_comparison)
4027 return (simple_operand_p (TREE_OPERAND (exp, 0))
4028 && simple_operand_p (TREE_OPERAND (exp, 1)));
4029
4030 if (code == TRUTH_NOT_EXPR)
4031 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4032
4033 return simple_operand_p (exp);
4034 }
4035
4036 \f
4037 /* The following functions are subroutines to fold_range_test and allow it to
4038 try to change a logical combination of comparisons into a range test.
4039
4040 For example, both
4041 X == 2 || X == 3 || X == 4 || X == 5
4042 and
4043 X >= 2 && X <= 5
4044 are converted to
4045 (unsigned) (X - 2) <= 3
4046
4047 We describe each set of comparisons as being either inside or outside
4048 a range, using a variable named like IN_P, and then describe the
4049 range with a lower and upper bound. If one of the bounds is omitted,
4050 it represents either the highest or lowest value of the type.
4051
4052 In the comments below, we represent a range by two numbers in brackets
4053 preceded by a "+" to designate being inside that range, or a "-" to
4054 designate being outside that range, so the condition can be inverted by
4055 flipping the prefix. An omitted bound is represented by a "-". For
4056 example, "- [-, 10]" means being outside the range starting at the lowest
4057 possible value and ending at 10, in other words, being greater than 10.
4058 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4059 always false.
4060
4061 We set up things so that the missing bounds are handled in a consistent
4062 manner so neither a missing bound nor "true" and "false" need to be
4063 handled using a special case. */
4064
4065 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4066 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4067 and UPPER1_P are nonzero if the respective argument is an upper bound
4068 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4069 must be specified for a comparison. ARG1 will be converted to ARG0's
4070 type if both are specified. */
4071
4072 static tree
4073 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4074 tree arg1, int upper1_p)
4075 {
4076 tree tem;
4077 int result;
4078 int sgn0, sgn1;
4079
4080 /* If neither arg represents infinity, do the normal operation.
4081 Else, if not a comparison, return infinity. Else handle the special
4082 comparison rules. Note that most of the cases below won't occur, but
4083 are handled for consistency. */
4084
4085 if (arg0 != 0 && arg1 != 0)
4086 {
4087 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4088 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4089 STRIP_NOPS (tem);
4090 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4091 }
4092
4093 if (TREE_CODE_CLASS (code) != tcc_comparison)
4094 return 0;
4095
4096 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4097 for neither. In real maths, we cannot assume open ended ranges are
4098 the same. But, this is computer arithmetic, where numbers are finite.
4099 We can therefore make the transformation of any unbounded range with
4100 the value Z, Z being greater than any representable number. This permits
4101 us to treat unbounded ranges as equal. */
4102 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4103 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4104 switch (code)
4105 {
4106 case EQ_EXPR:
4107 result = sgn0 == sgn1;
4108 break;
4109 case NE_EXPR:
4110 result = sgn0 != sgn1;
4111 break;
4112 case LT_EXPR:
4113 result = sgn0 < sgn1;
4114 break;
4115 case LE_EXPR:
4116 result = sgn0 <= sgn1;
4117 break;
4118 case GT_EXPR:
4119 result = sgn0 > sgn1;
4120 break;
4121 case GE_EXPR:
4122 result = sgn0 >= sgn1;
4123 break;
4124 default:
4125 gcc_unreachable ();
4126 }
4127
4128 return constant_boolean_node (result, type);
4129 }
4130 \f
4131 /* Helper routine for make_range. Perform one step for it, return
4132 new expression if the loop should continue or NULL_TREE if it should
4133 stop. */
4134
4135 tree
4136 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4137 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4138 bool *strict_overflow_p)
4139 {
4140 tree arg0_type = TREE_TYPE (arg0);
4141 tree n_low, n_high, low = *p_low, high = *p_high;
4142 int in_p = *p_in_p, n_in_p;
4143
4144 switch (code)
4145 {
4146 case TRUTH_NOT_EXPR:
4147 /* We can only do something if the range is testing for zero. */
4148 if (low == NULL_TREE || high == NULL_TREE
4149 || ! integer_zerop (low) || ! integer_zerop (high))
4150 return NULL_TREE;
4151 *p_in_p = ! in_p;
4152 return arg0;
4153
4154 case EQ_EXPR: case NE_EXPR:
4155 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4156 /* We can only do something if the range is testing for zero
4157 and if the second operand is an integer constant. Note that
4158 saying something is "in" the range we make is done by
4159 complementing IN_P since it will set in the initial case of
4160 being not equal to zero; "out" is leaving it alone. */
4161 if (low == NULL_TREE || high == NULL_TREE
4162 || ! integer_zerop (low) || ! integer_zerop (high)
4163 || TREE_CODE (arg1) != INTEGER_CST)
4164 return NULL_TREE;
4165
4166 switch (code)
4167 {
4168 case NE_EXPR: /* - [c, c] */
4169 low = high = arg1;
4170 break;
4171 case EQ_EXPR: /* + [c, c] */
4172 in_p = ! in_p, low = high = arg1;
4173 break;
4174 case GT_EXPR: /* - [-, c] */
4175 low = 0, high = arg1;
4176 break;
4177 case GE_EXPR: /* + [c, -] */
4178 in_p = ! in_p, low = arg1, high = 0;
4179 break;
4180 case LT_EXPR: /* - [c, -] */
4181 low = arg1, high = 0;
4182 break;
4183 case LE_EXPR: /* + [-, c] */
4184 in_p = ! in_p, low = 0, high = arg1;
4185 break;
4186 default:
4187 gcc_unreachable ();
4188 }
4189
4190 /* If this is an unsigned comparison, we also know that EXP is
4191 greater than or equal to zero. We base the range tests we make
4192 on that fact, so we record it here so we can parse existing
4193 range tests. We test arg0_type since often the return type
4194 of, e.g. EQ_EXPR, is boolean. */
4195 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4196 {
4197 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4198 in_p, low, high, 1,
4199 build_int_cst (arg0_type, 0),
4200 NULL_TREE))
4201 return NULL_TREE;
4202
4203 in_p = n_in_p, low = n_low, high = n_high;
4204
4205 /* If the high bound is missing, but we have a nonzero low
4206 bound, reverse the range so it goes from zero to the low bound
4207 minus 1. */
4208 if (high == 0 && low && ! integer_zerop (low))
4209 {
4210 in_p = ! in_p;
4211 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4212 build_int_cst (TREE_TYPE (low), 1), 0);
4213 low = build_int_cst (arg0_type, 0);
4214 }
4215 }
4216
4217 *p_low = low;
4218 *p_high = high;
4219 *p_in_p = in_p;
4220 return arg0;
4221
4222 case NEGATE_EXPR:
4223 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4224 low and high are non-NULL, then normalize will DTRT. */
4225 if (!TYPE_UNSIGNED (arg0_type)
4226 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4227 {
4228 if (low == NULL_TREE)
4229 low = TYPE_MIN_VALUE (arg0_type);
4230 if (high == NULL_TREE)
4231 high = TYPE_MAX_VALUE (arg0_type);
4232 }
4233
4234 /* (-x) IN [a,b] -> x in [-b, -a] */
4235 n_low = range_binop (MINUS_EXPR, exp_type,
4236 build_int_cst (exp_type, 0),
4237 0, high, 1);
4238 n_high = range_binop (MINUS_EXPR, exp_type,
4239 build_int_cst (exp_type, 0),
4240 0, low, 0);
4241 if (n_high != 0 && TREE_OVERFLOW (n_high))
4242 return NULL_TREE;
4243 goto normalize;
4244
4245 case BIT_NOT_EXPR:
4246 /* ~ X -> -X - 1 */
4247 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4248 build_int_cst (exp_type, 1));
4249
4250 case PLUS_EXPR:
4251 case MINUS_EXPR:
4252 if (TREE_CODE (arg1) != INTEGER_CST)
4253 return NULL_TREE;
4254
4255 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4256 move a constant to the other side. */
4257 if (!TYPE_UNSIGNED (arg0_type)
4258 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4259 return NULL_TREE;
4260
4261 /* If EXP is signed, any overflow in the computation is undefined,
4262 so we don't worry about it so long as our computations on
4263 the bounds don't overflow. For unsigned, overflow is defined
4264 and this is exactly the right thing. */
4265 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4266 arg0_type, low, 0, arg1, 0);
4267 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4268 arg0_type, high, 1, arg1, 0);
4269 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4270 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4271 return NULL_TREE;
4272
4273 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4274 *strict_overflow_p = true;
4275
4276 normalize:
4277 /* Check for an unsigned range which has wrapped around the maximum
4278 value thus making n_high < n_low, and normalize it. */
4279 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4280 {
4281 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4282 build_int_cst (TREE_TYPE (n_high), 1), 0);
4283 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4284 build_int_cst (TREE_TYPE (n_low), 1), 0);
4285
4286 /* If the range is of the form +/- [ x+1, x ], we won't
4287 be able to normalize it. But then, it represents the
4288 whole range or the empty set, so make it
4289 +/- [ -, - ]. */
4290 if (tree_int_cst_equal (n_low, low)
4291 && tree_int_cst_equal (n_high, high))
4292 low = high = 0;
4293 else
4294 in_p = ! in_p;
4295 }
4296 else
4297 low = n_low, high = n_high;
4298
4299 *p_low = low;
4300 *p_high = high;
4301 *p_in_p = in_p;
4302 return arg0;
4303
4304 CASE_CONVERT:
4305 case NON_LVALUE_EXPR:
4306 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4307 return NULL_TREE;
4308
4309 if (! INTEGRAL_TYPE_P (arg0_type)
4310 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4311 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4312 return NULL_TREE;
4313
4314 n_low = low, n_high = high;
4315
4316 if (n_low != 0)
4317 n_low = fold_convert_loc (loc, arg0_type, n_low);
4318
4319 if (n_high != 0)
4320 n_high = fold_convert_loc (loc, arg0_type, n_high);
4321
4322 /* If we're converting arg0 from an unsigned type, to exp,
4323 a signed type, we will be doing the comparison as unsigned.
4324 The tests above have already verified that LOW and HIGH
4325 are both positive.
4326
4327 So we have to ensure that we will handle large unsigned
4328 values the same way that the current signed bounds treat
4329 negative values. */
4330
4331 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4332 {
4333 tree high_positive;
4334 tree equiv_type;
4335 /* For fixed-point modes, we need to pass the saturating flag
4336 as the 2nd parameter. */
4337 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4338 equiv_type
4339 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4340 TYPE_SATURATING (arg0_type));
4341 else
4342 equiv_type
4343 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4344
4345 /* A range without an upper bound is, naturally, unbounded.
4346 Since convert would have cropped a very large value, use
4347 the max value for the destination type. */
4348 high_positive
4349 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4350 : TYPE_MAX_VALUE (arg0_type);
4351
4352 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4353 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4354 fold_convert_loc (loc, arg0_type,
4355 high_positive),
4356 build_int_cst (arg0_type, 1));
4357
4358 /* If the low bound is specified, "and" the range with the
4359 range for which the original unsigned value will be
4360 positive. */
4361 if (low != 0)
4362 {
4363 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4364 1, fold_convert_loc (loc, arg0_type,
4365 integer_zero_node),
4366 high_positive))
4367 return NULL_TREE;
4368
4369 in_p = (n_in_p == in_p);
4370 }
4371 else
4372 {
4373 /* Otherwise, "or" the range with the range of the input
4374 that will be interpreted as negative. */
4375 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4376 1, fold_convert_loc (loc, arg0_type,
4377 integer_zero_node),
4378 high_positive))
4379 return NULL_TREE;
4380
4381 in_p = (in_p != n_in_p);
4382 }
4383 }
4384
4385 *p_low = n_low;
4386 *p_high = n_high;
4387 *p_in_p = in_p;
4388 return arg0;
4389
4390 default:
4391 return NULL_TREE;
4392 }
4393 }
4394
4395 /* Given EXP, a logical expression, set the range it is testing into
4396 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4397 actually being tested. *PLOW and *PHIGH will be made of the same
4398 type as the returned expression. If EXP is not a comparison, we
4399 will most likely not be returning a useful value and range. Set
4400 *STRICT_OVERFLOW_P to true if the return value is only valid
4401 because signed overflow is undefined; otherwise, do not change
4402 *STRICT_OVERFLOW_P. */
4403
4404 tree
4405 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4406 bool *strict_overflow_p)
4407 {
4408 enum tree_code code;
4409 tree arg0, arg1 = NULL_TREE;
4410 tree exp_type, nexp;
4411 int in_p;
4412 tree low, high;
4413 location_t loc = EXPR_LOCATION (exp);
4414
4415 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4416 and see if we can refine the range. Some of the cases below may not
4417 happen, but it doesn't seem worth worrying about this. We "continue"
4418 the outer loop when we've changed something; otherwise we "break"
4419 the switch, which will "break" the while. */
4420
4421 in_p = 0;
4422 low = high = build_int_cst (TREE_TYPE (exp), 0);
4423
4424 while (1)
4425 {
4426 code = TREE_CODE (exp);
4427 exp_type = TREE_TYPE (exp);
4428 arg0 = NULL_TREE;
4429
4430 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4431 {
4432 if (TREE_OPERAND_LENGTH (exp) > 0)
4433 arg0 = TREE_OPERAND (exp, 0);
4434 if (TREE_CODE_CLASS (code) == tcc_binary
4435 || TREE_CODE_CLASS (code) == tcc_comparison
4436 || (TREE_CODE_CLASS (code) == tcc_expression
4437 && TREE_OPERAND_LENGTH (exp) > 1))
4438 arg1 = TREE_OPERAND (exp, 1);
4439 }
4440 if (arg0 == NULL_TREE)
4441 break;
4442
4443 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4444 &high, &in_p, strict_overflow_p);
4445 if (nexp == NULL_TREE)
4446 break;
4447 exp = nexp;
4448 }
4449
4450 /* If EXP is a constant, we can evaluate whether this is true or false. */
4451 if (TREE_CODE (exp) == INTEGER_CST)
4452 {
4453 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4454 exp, 0, low, 0))
4455 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4456 exp, 1, high, 1)));
4457 low = high = 0;
4458 exp = 0;
4459 }
4460
4461 *pin_p = in_p, *plow = low, *phigh = high;
4462 return exp;
4463 }
4464 \f
4465 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4466 type, TYPE, return an expression to test if EXP is in (or out of, depending
4467 on IN_P) the range. Return 0 if the test couldn't be created. */
4468
4469 tree
4470 build_range_check (location_t loc, tree type, tree exp, int in_p,
4471 tree low, tree high)
4472 {
4473 tree etype = TREE_TYPE (exp), value;
4474
4475 #ifdef HAVE_canonicalize_funcptr_for_compare
4476 /* Disable this optimization for function pointer expressions
4477 on targets that require function pointer canonicalization. */
4478 if (HAVE_canonicalize_funcptr_for_compare
4479 && TREE_CODE (etype) == POINTER_TYPE
4480 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4481 return NULL_TREE;
4482 #endif
4483
4484 if (! in_p)
4485 {
4486 value = build_range_check (loc, type, exp, 1, low, high);
4487 if (value != 0)
4488 return invert_truthvalue_loc (loc, value);
4489
4490 return 0;
4491 }
4492
4493 if (low == 0 && high == 0)
4494 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4495
4496 if (low == 0)
4497 return fold_build2_loc (loc, LE_EXPR, type, exp,
4498 fold_convert_loc (loc, etype, high));
4499
4500 if (high == 0)
4501 return fold_build2_loc (loc, GE_EXPR, type, exp,
4502 fold_convert_loc (loc, etype, low));
4503
4504 if (operand_equal_p (low, high, 0))
4505 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4506 fold_convert_loc (loc, etype, low));
4507
4508 if (integer_zerop (low))
4509 {
4510 if (! TYPE_UNSIGNED (etype))
4511 {
4512 etype = unsigned_type_for (etype);
4513 high = fold_convert_loc (loc, etype, high);
4514 exp = fold_convert_loc (loc, etype, exp);
4515 }
4516 return build_range_check (loc, type, exp, 1, 0, high);
4517 }
4518
4519 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4520 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4521 {
4522 int prec = TYPE_PRECISION (etype);
4523
4524 if (wi::mask (prec - 1, false, prec) == high)
4525 {
4526 if (TYPE_UNSIGNED (etype))
4527 {
4528 tree signed_etype = signed_type_for (etype);
4529 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4530 etype
4531 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4532 else
4533 etype = signed_etype;
4534 exp = fold_convert_loc (loc, etype, exp);
4535 }
4536 return fold_build2_loc (loc, GT_EXPR, type, exp,
4537 build_int_cst (etype, 0));
4538 }
4539 }
4540
4541 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4542 This requires wrap-around arithmetics for the type of the expression.
4543 First make sure that arithmetics in this type is valid, then make sure
4544 that it wraps around. */
4545 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4546 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4547 TYPE_UNSIGNED (etype));
4548
4549 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4550 {
4551 tree utype, minv, maxv;
4552
4553 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4554 for the type in question, as we rely on this here. */
4555 utype = unsigned_type_for (etype);
4556 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4557 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4558 build_int_cst (TREE_TYPE (maxv), 1), 1);
4559 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4560
4561 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4562 minv, 1, maxv, 1)))
4563 etype = utype;
4564 else
4565 return 0;
4566 }
4567
4568 high = fold_convert_loc (loc, etype, high);
4569 low = fold_convert_loc (loc, etype, low);
4570 exp = fold_convert_loc (loc, etype, exp);
4571
4572 value = const_binop (MINUS_EXPR, high, low);
4573
4574
4575 if (POINTER_TYPE_P (etype))
4576 {
4577 if (value != 0 && !TREE_OVERFLOW (value))
4578 {
4579 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4580 return build_range_check (loc, type,
4581 fold_build_pointer_plus_loc (loc, exp, low),
4582 1, build_int_cst (etype, 0), value);
4583 }
4584 return 0;
4585 }
4586
4587 if (value != 0 && !TREE_OVERFLOW (value))
4588 return build_range_check (loc, type,
4589 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4590 1, build_int_cst (etype, 0), value);
4591
4592 return 0;
4593 }
4594 \f
4595 /* Return the predecessor of VAL in its type, handling the infinite case. */
4596
4597 static tree
4598 range_predecessor (tree val)
4599 {
4600 tree type = TREE_TYPE (val);
4601
4602 if (INTEGRAL_TYPE_P (type)
4603 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4604 return 0;
4605 else
4606 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4607 build_int_cst (TREE_TYPE (val), 1), 0);
4608 }
4609
4610 /* Return the successor of VAL in its type, handling the infinite case. */
4611
4612 static tree
4613 range_successor (tree val)
4614 {
4615 tree type = TREE_TYPE (val);
4616
4617 if (INTEGRAL_TYPE_P (type)
4618 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4619 return 0;
4620 else
4621 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4622 build_int_cst (TREE_TYPE (val), 1), 0);
4623 }
4624
4625 /* Given two ranges, see if we can merge them into one. Return 1 if we
4626 can, 0 if we can't. Set the output range into the specified parameters. */
4627
4628 bool
4629 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4630 tree high0, int in1_p, tree low1, tree high1)
4631 {
4632 int no_overlap;
4633 int subset;
4634 int temp;
4635 tree tem;
4636 int in_p;
4637 tree low, high;
4638 int lowequal = ((low0 == 0 && low1 == 0)
4639 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4640 low0, 0, low1, 0)));
4641 int highequal = ((high0 == 0 && high1 == 0)
4642 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4643 high0, 1, high1, 1)));
4644
4645 /* Make range 0 be the range that starts first, or ends last if they
4646 start at the same value. Swap them if it isn't. */
4647 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4648 low0, 0, low1, 0))
4649 || (lowequal
4650 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4651 high1, 1, high0, 1))))
4652 {
4653 temp = in0_p, in0_p = in1_p, in1_p = temp;
4654 tem = low0, low0 = low1, low1 = tem;
4655 tem = high0, high0 = high1, high1 = tem;
4656 }
4657
4658 /* Now flag two cases, whether the ranges are disjoint or whether the
4659 second range is totally subsumed in the first. Note that the tests
4660 below are simplified by the ones above. */
4661 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4662 high0, 1, low1, 0));
4663 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4664 high1, 1, high0, 1));
4665
4666 /* We now have four cases, depending on whether we are including or
4667 excluding the two ranges. */
4668 if (in0_p && in1_p)
4669 {
4670 /* If they don't overlap, the result is false. If the second range
4671 is a subset it is the result. Otherwise, the range is from the start
4672 of the second to the end of the first. */
4673 if (no_overlap)
4674 in_p = 0, low = high = 0;
4675 else if (subset)
4676 in_p = 1, low = low1, high = high1;
4677 else
4678 in_p = 1, low = low1, high = high0;
4679 }
4680
4681 else if (in0_p && ! in1_p)
4682 {
4683 /* If they don't overlap, the result is the first range. If they are
4684 equal, the result is false. If the second range is a subset of the
4685 first, and the ranges begin at the same place, we go from just after
4686 the end of the second range to the end of the first. If the second
4687 range is not a subset of the first, or if it is a subset and both
4688 ranges end at the same place, the range starts at the start of the
4689 first range and ends just before the second range.
4690 Otherwise, we can't describe this as a single range. */
4691 if (no_overlap)
4692 in_p = 1, low = low0, high = high0;
4693 else if (lowequal && highequal)
4694 in_p = 0, low = high = 0;
4695 else if (subset && lowequal)
4696 {
4697 low = range_successor (high1);
4698 high = high0;
4699 in_p = 1;
4700 if (low == 0)
4701 {
4702 /* We are in the weird situation where high0 > high1 but
4703 high1 has no successor. Punt. */
4704 return 0;
4705 }
4706 }
4707 else if (! subset || highequal)
4708 {
4709 low = low0;
4710 high = range_predecessor (low1);
4711 in_p = 1;
4712 if (high == 0)
4713 {
4714 /* low0 < low1 but low1 has no predecessor. Punt. */
4715 return 0;
4716 }
4717 }
4718 else
4719 return 0;
4720 }
4721
4722 else if (! in0_p && in1_p)
4723 {
4724 /* If they don't overlap, the result is the second range. If the second
4725 is a subset of the first, the result is false. Otherwise,
4726 the range starts just after the first range and ends at the
4727 end of the second. */
4728 if (no_overlap)
4729 in_p = 1, low = low1, high = high1;
4730 else if (subset || highequal)
4731 in_p = 0, low = high = 0;
4732 else
4733 {
4734 low = range_successor (high0);
4735 high = high1;
4736 in_p = 1;
4737 if (low == 0)
4738 {
4739 /* high1 > high0 but high0 has no successor. Punt. */
4740 return 0;
4741 }
4742 }
4743 }
4744
4745 else
4746 {
4747 /* The case where we are excluding both ranges. Here the complex case
4748 is if they don't overlap. In that case, the only time we have a
4749 range is if they are adjacent. If the second is a subset of the
4750 first, the result is the first. Otherwise, the range to exclude
4751 starts at the beginning of the first range and ends at the end of the
4752 second. */
4753 if (no_overlap)
4754 {
4755 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4756 range_successor (high0),
4757 1, low1, 0)))
4758 in_p = 0, low = low0, high = high1;
4759 else
4760 {
4761 /* Canonicalize - [min, x] into - [-, x]. */
4762 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4763 switch (TREE_CODE (TREE_TYPE (low0)))
4764 {
4765 case ENUMERAL_TYPE:
4766 if (TYPE_PRECISION (TREE_TYPE (low0))
4767 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4768 break;
4769 /* FALLTHROUGH */
4770 case INTEGER_TYPE:
4771 if (tree_int_cst_equal (low0,
4772 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4773 low0 = 0;
4774 break;
4775 case POINTER_TYPE:
4776 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4777 && integer_zerop (low0))
4778 low0 = 0;
4779 break;
4780 default:
4781 break;
4782 }
4783
4784 /* Canonicalize - [x, max] into - [x, -]. */
4785 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4786 switch (TREE_CODE (TREE_TYPE (high1)))
4787 {
4788 case ENUMERAL_TYPE:
4789 if (TYPE_PRECISION (TREE_TYPE (high1))
4790 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4791 break;
4792 /* FALLTHROUGH */
4793 case INTEGER_TYPE:
4794 if (tree_int_cst_equal (high1,
4795 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4796 high1 = 0;
4797 break;
4798 case POINTER_TYPE:
4799 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4800 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4801 high1, 1,
4802 build_int_cst (TREE_TYPE (high1), 1),
4803 1)))
4804 high1 = 0;
4805 break;
4806 default:
4807 break;
4808 }
4809
4810 /* The ranges might be also adjacent between the maximum and
4811 minimum values of the given type. For
4812 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4813 return + [x + 1, y - 1]. */
4814 if (low0 == 0 && high1 == 0)
4815 {
4816 low = range_successor (high0);
4817 high = range_predecessor (low1);
4818 if (low == 0 || high == 0)
4819 return 0;
4820
4821 in_p = 1;
4822 }
4823 else
4824 return 0;
4825 }
4826 }
4827 else if (subset)
4828 in_p = 0, low = low0, high = high0;
4829 else
4830 in_p = 0, low = low0, high = high1;
4831 }
4832
4833 *pin_p = in_p, *plow = low, *phigh = high;
4834 return 1;
4835 }
4836 \f
4837
4838 /* Subroutine of fold, looking inside expressions of the form
4839 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4840 of the COND_EXPR. This function is being used also to optimize
4841 A op B ? C : A, by reversing the comparison first.
4842
4843 Return a folded expression whose code is not a COND_EXPR
4844 anymore, or NULL_TREE if no folding opportunity is found. */
4845
4846 static tree
4847 fold_cond_expr_with_comparison (location_t loc, tree type,
4848 tree arg0, tree arg1, tree arg2)
4849 {
4850 enum tree_code comp_code = TREE_CODE (arg0);
4851 tree arg00 = TREE_OPERAND (arg0, 0);
4852 tree arg01 = TREE_OPERAND (arg0, 1);
4853 tree arg1_type = TREE_TYPE (arg1);
4854 tree tem;
4855
4856 STRIP_NOPS (arg1);
4857 STRIP_NOPS (arg2);
4858
4859 /* If we have A op 0 ? A : -A, consider applying the following
4860 transformations:
4861
4862 A == 0? A : -A same as -A
4863 A != 0? A : -A same as A
4864 A >= 0? A : -A same as abs (A)
4865 A > 0? A : -A same as abs (A)
4866 A <= 0? A : -A same as -abs (A)
4867 A < 0? A : -A same as -abs (A)
4868
4869 None of these transformations work for modes with signed
4870 zeros. If A is +/-0, the first two transformations will
4871 change the sign of the result (from +0 to -0, or vice
4872 versa). The last four will fix the sign of the result,
4873 even though the original expressions could be positive or
4874 negative, depending on the sign of A.
4875
4876 Note that all these transformations are correct if A is
4877 NaN, since the two alternatives (A and -A) are also NaNs. */
4878 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4879 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4880 ? real_zerop (arg01)
4881 : integer_zerop (arg01))
4882 && ((TREE_CODE (arg2) == NEGATE_EXPR
4883 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4884 /* In the case that A is of the form X-Y, '-A' (arg2) may
4885 have already been folded to Y-X, check for that. */
4886 || (TREE_CODE (arg1) == MINUS_EXPR
4887 && TREE_CODE (arg2) == MINUS_EXPR
4888 && operand_equal_p (TREE_OPERAND (arg1, 0),
4889 TREE_OPERAND (arg2, 1), 0)
4890 && operand_equal_p (TREE_OPERAND (arg1, 1),
4891 TREE_OPERAND (arg2, 0), 0))))
4892 switch (comp_code)
4893 {
4894 case EQ_EXPR:
4895 case UNEQ_EXPR:
4896 tem = fold_convert_loc (loc, arg1_type, arg1);
4897 return pedantic_non_lvalue_loc (loc,
4898 fold_convert_loc (loc, type,
4899 negate_expr (tem)));
4900 case NE_EXPR:
4901 case LTGT_EXPR:
4902 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4903 case UNGE_EXPR:
4904 case UNGT_EXPR:
4905 if (flag_trapping_math)
4906 break;
4907 /* Fall through. */
4908 case GE_EXPR:
4909 case GT_EXPR:
4910 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4911 arg1 = fold_convert_loc (loc, signed_type_for
4912 (TREE_TYPE (arg1)), arg1);
4913 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4914 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4915 case UNLE_EXPR:
4916 case UNLT_EXPR:
4917 if (flag_trapping_math)
4918 break;
4919 case LE_EXPR:
4920 case LT_EXPR:
4921 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4922 arg1 = fold_convert_loc (loc, signed_type_for
4923 (TREE_TYPE (arg1)), arg1);
4924 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4925 return negate_expr (fold_convert_loc (loc, type, tem));
4926 default:
4927 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4928 break;
4929 }
4930
4931 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4932 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4933 both transformations are correct when A is NaN: A != 0
4934 is then true, and A == 0 is false. */
4935
4936 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4937 && integer_zerop (arg01) && integer_zerop (arg2))
4938 {
4939 if (comp_code == NE_EXPR)
4940 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4941 else if (comp_code == EQ_EXPR)
4942 return build_zero_cst (type);
4943 }
4944
4945 /* Try some transformations of A op B ? A : B.
4946
4947 A == B? A : B same as B
4948 A != B? A : B same as A
4949 A >= B? A : B same as max (A, B)
4950 A > B? A : B same as max (B, A)
4951 A <= B? A : B same as min (A, B)
4952 A < B? A : B same as min (B, A)
4953
4954 As above, these transformations don't work in the presence
4955 of signed zeros. For example, if A and B are zeros of
4956 opposite sign, the first two transformations will change
4957 the sign of the result. In the last four, the original
4958 expressions give different results for (A=+0, B=-0) and
4959 (A=-0, B=+0), but the transformed expressions do not.
4960
4961 The first two transformations are correct if either A or B
4962 is a NaN. In the first transformation, the condition will
4963 be false, and B will indeed be chosen. In the case of the
4964 second transformation, the condition A != B will be true,
4965 and A will be chosen.
4966
4967 The conversions to max() and min() are not correct if B is
4968 a number and A is not. The conditions in the original
4969 expressions will be false, so all four give B. The min()
4970 and max() versions would give a NaN instead. */
4971 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4972 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4973 /* Avoid these transformations if the COND_EXPR may be used
4974 as an lvalue in the C++ front-end. PR c++/19199. */
4975 && (in_gimple_form
4976 || VECTOR_TYPE_P (type)
4977 || (! lang_GNU_CXX ()
4978 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4979 || ! maybe_lvalue_p (arg1)
4980 || ! maybe_lvalue_p (arg2)))
4981 {
4982 tree comp_op0 = arg00;
4983 tree comp_op1 = arg01;
4984 tree comp_type = TREE_TYPE (comp_op0);
4985
4986 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4987 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4988 {
4989 comp_type = type;
4990 comp_op0 = arg1;
4991 comp_op1 = arg2;
4992 }
4993
4994 switch (comp_code)
4995 {
4996 case EQ_EXPR:
4997 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4998 case NE_EXPR:
4999 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5000 case LE_EXPR:
5001 case LT_EXPR:
5002 case UNLE_EXPR:
5003 case UNLT_EXPR:
5004 /* In C++ a ?: expression can be an lvalue, so put the
5005 operand which will be used if they are equal first
5006 so that we can convert this back to the
5007 corresponding COND_EXPR. */
5008 if (!HONOR_NANS (arg1))
5009 {
5010 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5011 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5012 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5013 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5014 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5015 comp_op1, comp_op0);
5016 return pedantic_non_lvalue_loc (loc,
5017 fold_convert_loc (loc, type, tem));
5018 }
5019 break;
5020 case GE_EXPR:
5021 case GT_EXPR:
5022 case UNGE_EXPR:
5023 case UNGT_EXPR:
5024 if (!HONOR_NANS (arg1))
5025 {
5026 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5027 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5028 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5029 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5030 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5031 comp_op1, comp_op0);
5032 return pedantic_non_lvalue_loc (loc,
5033 fold_convert_loc (loc, type, tem));
5034 }
5035 break;
5036 case UNEQ_EXPR:
5037 if (!HONOR_NANS (arg1))
5038 return pedantic_non_lvalue_loc (loc,
5039 fold_convert_loc (loc, type, arg2));
5040 break;
5041 case LTGT_EXPR:
5042 if (!HONOR_NANS (arg1))
5043 return pedantic_non_lvalue_loc (loc,
5044 fold_convert_loc (loc, type, arg1));
5045 break;
5046 default:
5047 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5048 break;
5049 }
5050 }
5051
5052 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5053 we might still be able to simplify this. For example,
5054 if C1 is one less or one more than C2, this might have started
5055 out as a MIN or MAX and been transformed by this function.
5056 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5057
5058 if (INTEGRAL_TYPE_P (type)
5059 && TREE_CODE (arg01) == INTEGER_CST
5060 && TREE_CODE (arg2) == INTEGER_CST)
5061 switch (comp_code)
5062 {
5063 case EQ_EXPR:
5064 if (TREE_CODE (arg1) == INTEGER_CST)
5065 break;
5066 /* We can replace A with C1 in this case. */
5067 arg1 = fold_convert_loc (loc, type, arg01);
5068 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5069
5070 case LT_EXPR:
5071 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5072 MIN_EXPR, to preserve the signedness of the comparison. */
5073 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5074 OEP_ONLY_CONST)
5075 && operand_equal_p (arg01,
5076 const_binop (PLUS_EXPR, arg2,
5077 build_int_cst (type, 1)),
5078 OEP_ONLY_CONST))
5079 {
5080 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5081 fold_convert_loc (loc, TREE_TYPE (arg00),
5082 arg2));
5083 return pedantic_non_lvalue_loc (loc,
5084 fold_convert_loc (loc, type, tem));
5085 }
5086 break;
5087
5088 case LE_EXPR:
5089 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5090 as above. */
5091 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5092 OEP_ONLY_CONST)
5093 && operand_equal_p (arg01,
5094 const_binop (MINUS_EXPR, arg2,
5095 build_int_cst (type, 1)),
5096 OEP_ONLY_CONST))
5097 {
5098 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5099 fold_convert_loc (loc, TREE_TYPE (arg00),
5100 arg2));
5101 return pedantic_non_lvalue_loc (loc,
5102 fold_convert_loc (loc, type, tem));
5103 }
5104 break;
5105
5106 case GT_EXPR:
5107 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5108 MAX_EXPR, to preserve the signedness of the comparison. */
5109 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5110 OEP_ONLY_CONST)
5111 && operand_equal_p (arg01,
5112 const_binop (MINUS_EXPR, arg2,
5113 build_int_cst (type, 1)),
5114 OEP_ONLY_CONST))
5115 {
5116 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5117 fold_convert_loc (loc, TREE_TYPE (arg00),
5118 arg2));
5119 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5120 }
5121 break;
5122
5123 case GE_EXPR:
5124 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5125 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5126 OEP_ONLY_CONST)
5127 && operand_equal_p (arg01,
5128 const_binop (PLUS_EXPR, arg2,
5129 build_int_cst (type, 1)),
5130 OEP_ONLY_CONST))
5131 {
5132 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5133 fold_convert_loc (loc, TREE_TYPE (arg00),
5134 arg2));
5135 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5136 }
5137 break;
5138 case NE_EXPR:
5139 break;
5140 default:
5141 gcc_unreachable ();
5142 }
5143
5144 return NULL_TREE;
5145 }
5146
5147
5148 \f
5149 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5150 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5151 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5152 false) >= 2)
5153 #endif
5154
5155 /* EXP is some logical combination of boolean tests. See if we can
5156 merge it into some range test. Return the new tree if so. */
5157
5158 static tree
5159 fold_range_test (location_t loc, enum tree_code code, tree type,
5160 tree op0, tree op1)
5161 {
5162 int or_op = (code == TRUTH_ORIF_EXPR
5163 || code == TRUTH_OR_EXPR);
5164 int in0_p, in1_p, in_p;
5165 tree low0, low1, low, high0, high1, high;
5166 bool strict_overflow_p = false;
5167 tree tem, lhs, rhs;
5168 const char * const warnmsg = G_("assuming signed overflow does not occur "
5169 "when simplifying range test");
5170
5171 if (!INTEGRAL_TYPE_P (type))
5172 return 0;
5173
5174 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5175 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5176
5177 /* If this is an OR operation, invert both sides; we will invert
5178 again at the end. */
5179 if (or_op)
5180 in0_p = ! in0_p, in1_p = ! in1_p;
5181
5182 /* If both expressions are the same, if we can merge the ranges, and we
5183 can build the range test, return it or it inverted. If one of the
5184 ranges is always true or always false, consider it to be the same
5185 expression as the other. */
5186 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5187 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5188 in1_p, low1, high1)
5189 && 0 != (tem = (build_range_check (loc, type,
5190 lhs != 0 ? lhs
5191 : rhs != 0 ? rhs : integer_zero_node,
5192 in_p, low, high))))
5193 {
5194 if (strict_overflow_p)
5195 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5196 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5197 }
5198
5199 /* On machines where the branch cost is expensive, if this is a
5200 short-circuited branch and the underlying object on both sides
5201 is the same, make a non-short-circuit operation. */
5202 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5203 && lhs != 0 && rhs != 0
5204 && (code == TRUTH_ANDIF_EXPR
5205 || code == TRUTH_ORIF_EXPR)
5206 && operand_equal_p (lhs, rhs, 0))
5207 {
5208 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5209 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5210 which cases we can't do this. */
5211 if (simple_operand_p (lhs))
5212 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5213 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5214 type, op0, op1);
5215
5216 else if (!lang_hooks.decls.global_bindings_p ()
5217 && !CONTAINS_PLACEHOLDER_P (lhs))
5218 {
5219 tree common = save_expr (lhs);
5220
5221 if (0 != (lhs = build_range_check (loc, type, common,
5222 or_op ? ! in0_p : in0_p,
5223 low0, high0))
5224 && (0 != (rhs = build_range_check (loc, type, common,
5225 or_op ? ! in1_p : in1_p,
5226 low1, high1))))
5227 {
5228 if (strict_overflow_p)
5229 fold_overflow_warning (warnmsg,
5230 WARN_STRICT_OVERFLOW_COMPARISON);
5231 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5232 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5233 type, lhs, rhs);
5234 }
5235 }
5236 }
5237
5238 return 0;
5239 }
5240 \f
5241 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5242 bit value. Arrange things so the extra bits will be set to zero if and
5243 only if C is signed-extended to its full width. If MASK is nonzero,
5244 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5245
5246 static tree
5247 unextend (tree c, int p, int unsignedp, tree mask)
5248 {
5249 tree type = TREE_TYPE (c);
5250 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5251 tree temp;
5252
5253 if (p == modesize || unsignedp)
5254 return c;
5255
5256 /* We work by getting just the sign bit into the low-order bit, then
5257 into the high-order bit, then sign-extend. We then XOR that value
5258 with C. */
5259 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5260
5261 /* We must use a signed type in order to get an arithmetic right shift.
5262 However, we must also avoid introducing accidental overflows, so that
5263 a subsequent call to integer_zerop will work. Hence we must
5264 do the type conversion here. At this point, the constant is either
5265 zero or one, and the conversion to a signed type can never overflow.
5266 We could get an overflow if this conversion is done anywhere else. */
5267 if (TYPE_UNSIGNED (type))
5268 temp = fold_convert (signed_type_for (type), temp);
5269
5270 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5271 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5272 if (mask != 0)
5273 temp = const_binop (BIT_AND_EXPR, temp,
5274 fold_convert (TREE_TYPE (c), mask));
5275 /* If necessary, convert the type back to match the type of C. */
5276 if (TYPE_UNSIGNED (type))
5277 temp = fold_convert (type, temp);
5278
5279 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5280 }
5281 \f
5282 /* For an expression that has the form
5283 (A && B) || ~B
5284 or
5285 (A || B) && ~B,
5286 we can drop one of the inner expressions and simplify to
5287 A || ~B
5288 or
5289 A && ~B
5290 LOC is the location of the resulting expression. OP is the inner
5291 logical operation; the left-hand side in the examples above, while CMPOP
5292 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5293 removing a condition that guards another, as in
5294 (A != NULL && A->...) || A == NULL
5295 which we must not transform. If RHS_ONLY is true, only eliminate the
5296 right-most operand of the inner logical operation. */
5297
5298 static tree
5299 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5300 bool rhs_only)
5301 {
5302 tree type = TREE_TYPE (cmpop);
5303 enum tree_code code = TREE_CODE (cmpop);
5304 enum tree_code truthop_code = TREE_CODE (op);
5305 tree lhs = TREE_OPERAND (op, 0);
5306 tree rhs = TREE_OPERAND (op, 1);
5307 tree orig_lhs = lhs, orig_rhs = rhs;
5308 enum tree_code rhs_code = TREE_CODE (rhs);
5309 enum tree_code lhs_code = TREE_CODE (lhs);
5310 enum tree_code inv_code;
5311
5312 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5313 return NULL_TREE;
5314
5315 if (TREE_CODE_CLASS (code) != tcc_comparison)
5316 return NULL_TREE;
5317
5318 if (rhs_code == truthop_code)
5319 {
5320 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5321 if (newrhs != NULL_TREE)
5322 {
5323 rhs = newrhs;
5324 rhs_code = TREE_CODE (rhs);
5325 }
5326 }
5327 if (lhs_code == truthop_code && !rhs_only)
5328 {
5329 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5330 if (newlhs != NULL_TREE)
5331 {
5332 lhs = newlhs;
5333 lhs_code = TREE_CODE (lhs);
5334 }
5335 }
5336
5337 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5338 if (inv_code == rhs_code
5339 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5340 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5341 return lhs;
5342 if (!rhs_only && inv_code == lhs_code
5343 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5344 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5345 return rhs;
5346 if (rhs != orig_rhs || lhs != orig_lhs)
5347 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5348 lhs, rhs);
5349 return NULL_TREE;
5350 }
5351
5352 /* Find ways of folding logical expressions of LHS and RHS:
5353 Try to merge two comparisons to the same innermost item.
5354 Look for range tests like "ch >= '0' && ch <= '9'".
5355 Look for combinations of simple terms on machines with expensive branches
5356 and evaluate the RHS unconditionally.
5357
5358 For example, if we have p->a == 2 && p->b == 4 and we can make an
5359 object large enough to span both A and B, we can do this with a comparison
5360 against the object ANDed with the a mask.
5361
5362 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5363 operations to do this with one comparison.
5364
5365 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5366 function and the one above.
5367
5368 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5369 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5370
5371 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5372 two operands.
5373
5374 We return the simplified tree or 0 if no optimization is possible. */
5375
5376 static tree
5377 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5378 tree lhs, tree rhs)
5379 {
5380 /* If this is the "or" of two comparisons, we can do something if
5381 the comparisons are NE_EXPR. If this is the "and", we can do something
5382 if the comparisons are EQ_EXPR. I.e.,
5383 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5384
5385 WANTED_CODE is this operation code. For single bit fields, we can
5386 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5387 comparison for one-bit fields. */
5388
5389 enum tree_code wanted_code;
5390 enum tree_code lcode, rcode;
5391 tree ll_arg, lr_arg, rl_arg, rr_arg;
5392 tree ll_inner, lr_inner, rl_inner, rr_inner;
5393 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5394 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5395 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5396 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5397 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5398 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5399 machine_mode lnmode, rnmode;
5400 tree ll_mask, lr_mask, rl_mask, rr_mask;
5401 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5402 tree l_const, r_const;
5403 tree lntype, rntype, result;
5404 HOST_WIDE_INT first_bit, end_bit;
5405 int volatilep;
5406
5407 /* Start by getting the comparison codes. Fail if anything is volatile.
5408 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5409 it were surrounded with a NE_EXPR. */
5410
5411 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5412 return 0;
5413
5414 lcode = TREE_CODE (lhs);
5415 rcode = TREE_CODE (rhs);
5416
5417 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5418 {
5419 lhs = build2 (NE_EXPR, truth_type, lhs,
5420 build_int_cst (TREE_TYPE (lhs), 0));
5421 lcode = NE_EXPR;
5422 }
5423
5424 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5425 {
5426 rhs = build2 (NE_EXPR, truth_type, rhs,
5427 build_int_cst (TREE_TYPE (rhs), 0));
5428 rcode = NE_EXPR;
5429 }
5430
5431 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5432 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5433 return 0;
5434
5435 ll_arg = TREE_OPERAND (lhs, 0);
5436 lr_arg = TREE_OPERAND (lhs, 1);
5437 rl_arg = TREE_OPERAND (rhs, 0);
5438 rr_arg = TREE_OPERAND (rhs, 1);
5439
5440 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5441 if (simple_operand_p (ll_arg)
5442 && simple_operand_p (lr_arg))
5443 {
5444 if (operand_equal_p (ll_arg, rl_arg, 0)
5445 && operand_equal_p (lr_arg, rr_arg, 0))
5446 {
5447 result = combine_comparisons (loc, code, lcode, rcode,
5448 truth_type, ll_arg, lr_arg);
5449 if (result)
5450 return result;
5451 }
5452 else if (operand_equal_p (ll_arg, rr_arg, 0)
5453 && operand_equal_p (lr_arg, rl_arg, 0))
5454 {
5455 result = combine_comparisons (loc, code, lcode,
5456 swap_tree_comparison (rcode),
5457 truth_type, ll_arg, lr_arg);
5458 if (result)
5459 return result;
5460 }
5461 }
5462
5463 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5464 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5465
5466 /* If the RHS can be evaluated unconditionally and its operands are
5467 simple, it wins to evaluate the RHS unconditionally on machines
5468 with expensive branches. In this case, this isn't a comparison
5469 that can be merged. */
5470
5471 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5472 false) >= 2
5473 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5474 && simple_operand_p (rl_arg)
5475 && simple_operand_p (rr_arg))
5476 {
5477 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5478 if (code == TRUTH_OR_EXPR
5479 && lcode == NE_EXPR && integer_zerop (lr_arg)
5480 && rcode == NE_EXPR && integer_zerop (rr_arg)
5481 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5482 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5483 return build2_loc (loc, NE_EXPR, truth_type,
5484 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5485 ll_arg, rl_arg),
5486 build_int_cst (TREE_TYPE (ll_arg), 0));
5487
5488 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5489 if (code == TRUTH_AND_EXPR
5490 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5491 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5492 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5493 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5494 return build2_loc (loc, EQ_EXPR, truth_type,
5495 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5496 ll_arg, rl_arg),
5497 build_int_cst (TREE_TYPE (ll_arg), 0));
5498 }
5499
5500 /* See if the comparisons can be merged. Then get all the parameters for
5501 each side. */
5502
5503 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5504 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5505 return 0;
5506
5507 volatilep = 0;
5508 ll_inner = decode_field_reference (loc, ll_arg,
5509 &ll_bitsize, &ll_bitpos, &ll_mode,
5510 &ll_unsignedp, &volatilep, &ll_mask,
5511 &ll_and_mask);
5512 lr_inner = decode_field_reference (loc, lr_arg,
5513 &lr_bitsize, &lr_bitpos, &lr_mode,
5514 &lr_unsignedp, &volatilep, &lr_mask,
5515 &lr_and_mask);
5516 rl_inner = decode_field_reference (loc, rl_arg,
5517 &rl_bitsize, &rl_bitpos, &rl_mode,
5518 &rl_unsignedp, &volatilep, &rl_mask,
5519 &rl_and_mask);
5520 rr_inner = decode_field_reference (loc, rr_arg,
5521 &rr_bitsize, &rr_bitpos, &rr_mode,
5522 &rr_unsignedp, &volatilep, &rr_mask,
5523 &rr_and_mask);
5524
5525 /* It must be true that the inner operation on the lhs of each
5526 comparison must be the same if we are to be able to do anything.
5527 Then see if we have constants. If not, the same must be true for
5528 the rhs's. */
5529 if (volatilep || ll_inner == 0 || rl_inner == 0
5530 || ! operand_equal_p (ll_inner, rl_inner, 0))
5531 return 0;
5532
5533 if (TREE_CODE (lr_arg) == INTEGER_CST
5534 && TREE_CODE (rr_arg) == INTEGER_CST)
5535 l_const = lr_arg, r_const = rr_arg;
5536 else if (lr_inner == 0 || rr_inner == 0
5537 || ! operand_equal_p (lr_inner, rr_inner, 0))
5538 return 0;
5539 else
5540 l_const = r_const = 0;
5541
5542 /* If either comparison code is not correct for our logical operation,
5543 fail. However, we can convert a one-bit comparison against zero into
5544 the opposite comparison against that bit being set in the field. */
5545
5546 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5547 if (lcode != wanted_code)
5548 {
5549 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5550 {
5551 /* Make the left operand unsigned, since we are only interested
5552 in the value of one bit. Otherwise we are doing the wrong
5553 thing below. */
5554 ll_unsignedp = 1;
5555 l_const = ll_mask;
5556 }
5557 else
5558 return 0;
5559 }
5560
5561 /* This is analogous to the code for l_const above. */
5562 if (rcode != wanted_code)
5563 {
5564 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5565 {
5566 rl_unsignedp = 1;
5567 r_const = rl_mask;
5568 }
5569 else
5570 return 0;
5571 }
5572
5573 /* See if we can find a mode that contains both fields being compared on
5574 the left. If we can't, fail. Otherwise, update all constants and masks
5575 to be relative to a field of that size. */
5576 first_bit = MIN (ll_bitpos, rl_bitpos);
5577 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5578 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5579 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5580 volatilep);
5581 if (lnmode == VOIDmode)
5582 return 0;
5583
5584 lnbitsize = GET_MODE_BITSIZE (lnmode);
5585 lnbitpos = first_bit & ~ (lnbitsize - 1);
5586 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5587 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5588
5589 if (BYTES_BIG_ENDIAN)
5590 {
5591 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5592 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5593 }
5594
5595 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5596 size_int (xll_bitpos));
5597 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5598 size_int (xrl_bitpos));
5599
5600 if (l_const)
5601 {
5602 l_const = fold_convert_loc (loc, lntype, l_const);
5603 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5604 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5605 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5606 fold_build1_loc (loc, BIT_NOT_EXPR,
5607 lntype, ll_mask))))
5608 {
5609 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5610
5611 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5612 }
5613 }
5614 if (r_const)
5615 {
5616 r_const = fold_convert_loc (loc, lntype, r_const);
5617 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5618 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5619 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5620 fold_build1_loc (loc, BIT_NOT_EXPR,
5621 lntype, rl_mask))))
5622 {
5623 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5624
5625 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5626 }
5627 }
5628
5629 /* If the right sides are not constant, do the same for it. Also,
5630 disallow this optimization if a size or signedness mismatch occurs
5631 between the left and right sides. */
5632 if (l_const == 0)
5633 {
5634 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5635 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5636 /* Make sure the two fields on the right
5637 correspond to the left without being swapped. */
5638 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5639 return 0;
5640
5641 first_bit = MIN (lr_bitpos, rr_bitpos);
5642 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5643 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5644 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5645 volatilep);
5646 if (rnmode == VOIDmode)
5647 return 0;
5648
5649 rnbitsize = GET_MODE_BITSIZE (rnmode);
5650 rnbitpos = first_bit & ~ (rnbitsize - 1);
5651 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5652 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5653
5654 if (BYTES_BIG_ENDIAN)
5655 {
5656 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5657 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5658 }
5659
5660 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5661 rntype, lr_mask),
5662 size_int (xlr_bitpos));
5663 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5664 rntype, rr_mask),
5665 size_int (xrr_bitpos));
5666
5667 /* Make a mask that corresponds to both fields being compared.
5668 Do this for both items being compared. If the operands are the
5669 same size and the bits being compared are in the same position
5670 then we can do this by masking both and comparing the masked
5671 results. */
5672 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5673 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5674 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5675 {
5676 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5677 ll_unsignedp || rl_unsignedp);
5678 if (! all_ones_mask_p (ll_mask, lnbitsize))
5679 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5680
5681 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5682 lr_unsignedp || rr_unsignedp);
5683 if (! all_ones_mask_p (lr_mask, rnbitsize))
5684 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5685
5686 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5687 }
5688
5689 /* There is still another way we can do something: If both pairs of
5690 fields being compared are adjacent, we may be able to make a wider
5691 field containing them both.
5692
5693 Note that we still must mask the lhs/rhs expressions. Furthermore,
5694 the mask must be shifted to account for the shift done by
5695 make_bit_field_ref. */
5696 if ((ll_bitsize + ll_bitpos == rl_bitpos
5697 && lr_bitsize + lr_bitpos == rr_bitpos)
5698 || (ll_bitpos == rl_bitpos + rl_bitsize
5699 && lr_bitpos == rr_bitpos + rr_bitsize))
5700 {
5701 tree type;
5702
5703 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5704 ll_bitsize + rl_bitsize,
5705 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5706 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5707 lr_bitsize + rr_bitsize,
5708 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5709
5710 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5711 size_int (MIN (xll_bitpos, xrl_bitpos)));
5712 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5713 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5714
5715 /* Convert to the smaller type before masking out unwanted bits. */
5716 type = lntype;
5717 if (lntype != rntype)
5718 {
5719 if (lnbitsize > rnbitsize)
5720 {
5721 lhs = fold_convert_loc (loc, rntype, lhs);
5722 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5723 type = rntype;
5724 }
5725 else if (lnbitsize < rnbitsize)
5726 {
5727 rhs = fold_convert_loc (loc, lntype, rhs);
5728 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5729 type = lntype;
5730 }
5731 }
5732
5733 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5734 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5735
5736 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5737 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5738
5739 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5740 }
5741
5742 return 0;
5743 }
5744
5745 /* Handle the case of comparisons with constants. If there is something in
5746 common between the masks, those bits of the constants must be the same.
5747 If not, the condition is always false. Test for this to avoid generating
5748 incorrect code below. */
5749 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5750 if (! integer_zerop (result)
5751 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5752 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5753 {
5754 if (wanted_code == NE_EXPR)
5755 {
5756 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5757 return constant_boolean_node (true, truth_type);
5758 }
5759 else
5760 {
5761 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5762 return constant_boolean_node (false, truth_type);
5763 }
5764 }
5765
5766 /* Construct the expression we will return. First get the component
5767 reference we will make. Unless the mask is all ones the width of
5768 that field, perform the mask operation. Then compare with the
5769 merged constant. */
5770 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5771 ll_unsignedp || rl_unsignedp);
5772
5773 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5774 if (! all_ones_mask_p (ll_mask, lnbitsize))
5775 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5776
5777 return build2_loc (loc, wanted_code, truth_type, result,
5778 const_binop (BIT_IOR_EXPR, l_const, r_const));
5779 }
5780 \f
5781 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5782 constant. */
5783
5784 static tree
5785 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5786 tree op0, tree op1)
5787 {
5788 tree arg0 = op0;
5789 enum tree_code op_code;
5790 tree comp_const;
5791 tree minmax_const;
5792 int consts_equal, consts_lt;
5793 tree inner;
5794
5795 STRIP_SIGN_NOPS (arg0);
5796
5797 op_code = TREE_CODE (arg0);
5798 minmax_const = TREE_OPERAND (arg0, 1);
5799 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5800 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5801 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5802 inner = TREE_OPERAND (arg0, 0);
5803
5804 /* If something does not permit us to optimize, return the original tree. */
5805 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5806 || TREE_CODE (comp_const) != INTEGER_CST
5807 || TREE_OVERFLOW (comp_const)
5808 || TREE_CODE (minmax_const) != INTEGER_CST
5809 || TREE_OVERFLOW (minmax_const))
5810 return NULL_TREE;
5811
5812 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5813 and GT_EXPR, doing the rest with recursive calls using logical
5814 simplifications. */
5815 switch (code)
5816 {
5817 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5818 {
5819 tree tem
5820 = optimize_minmax_comparison (loc,
5821 invert_tree_comparison (code, false),
5822 type, op0, op1);
5823 if (tem)
5824 return invert_truthvalue_loc (loc, tem);
5825 return NULL_TREE;
5826 }
5827
5828 case GE_EXPR:
5829 return
5830 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5831 optimize_minmax_comparison
5832 (loc, EQ_EXPR, type, arg0, comp_const),
5833 optimize_minmax_comparison
5834 (loc, GT_EXPR, type, arg0, comp_const));
5835
5836 case EQ_EXPR:
5837 if (op_code == MAX_EXPR && consts_equal)
5838 /* MAX (X, 0) == 0 -> X <= 0 */
5839 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5840
5841 else if (op_code == MAX_EXPR && consts_lt)
5842 /* MAX (X, 0) == 5 -> X == 5 */
5843 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5844
5845 else if (op_code == MAX_EXPR)
5846 /* MAX (X, 0) == -1 -> false */
5847 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5848
5849 else if (consts_equal)
5850 /* MIN (X, 0) == 0 -> X >= 0 */
5851 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5852
5853 else if (consts_lt)
5854 /* MIN (X, 0) == 5 -> false */
5855 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5856
5857 else
5858 /* MIN (X, 0) == -1 -> X == -1 */
5859 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5860
5861 case GT_EXPR:
5862 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5863 /* MAX (X, 0) > 0 -> X > 0
5864 MAX (X, 0) > 5 -> X > 5 */
5865 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5866
5867 else if (op_code == MAX_EXPR)
5868 /* MAX (X, 0) > -1 -> true */
5869 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5870
5871 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5872 /* MIN (X, 0) > 0 -> false
5873 MIN (X, 0) > 5 -> false */
5874 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5875
5876 else
5877 /* MIN (X, 0) > -1 -> X > -1 */
5878 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5879
5880 default:
5881 return NULL_TREE;
5882 }
5883 }
5884 \f
5885 /* T is an integer expression that is being multiplied, divided, or taken a
5886 modulus (CODE says which and what kind of divide or modulus) by a
5887 constant C. See if we can eliminate that operation by folding it with
5888 other operations already in T. WIDE_TYPE, if non-null, is a type that
5889 should be used for the computation if wider than our type.
5890
5891 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5892 (X * 2) + (Y * 4). We must, however, be assured that either the original
5893 expression would not overflow or that overflow is undefined for the type
5894 in the language in question.
5895
5896 If we return a non-null expression, it is an equivalent form of the
5897 original computation, but need not be in the original type.
5898
5899 We set *STRICT_OVERFLOW_P to true if the return values depends on
5900 signed overflow being undefined. Otherwise we do not change
5901 *STRICT_OVERFLOW_P. */
5902
5903 static tree
5904 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5905 bool *strict_overflow_p)
5906 {
5907 /* To avoid exponential search depth, refuse to allow recursion past
5908 three levels. Beyond that (1) it's highly unlikely that we'll find
5909 something interesting and (2) we've probably processed it before
5910 when we built the inner expression. */
5911
5912 static int depth;
5913 tree ret;
5914
5915 if (depth > 3)
5916 return NULL;
5917
5918 depth++;
5919 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5920 depth--;
5921
5922 return ret;
5923 }
5924
5925 static tree
5926 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5927 bool *strict_overflow_p)
5928 {
5929 tree type = TREE_TYPE (t);
5930 enum tree_code tcode = TREE_CODE (t);
5931 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5932 > GET_MODE_SIZE (TYPE_MODE (type)))
5933 ? wide_type : type);
5934 tree t1, t2;
5935 int same_p = tcode == code;
5936 tree op0 = NULL_TREE, op1 = NULL_TREE;
5937 bool sub_strict_overflow_p;
5938
5939 /* Don't deal with constants of zero here; they confuse the code below. */
5940 if (integer_zerop (c))
5941 return NULL_TREE;
5942
5943 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5944 op0 = TREE_OPERAND (t, 0);
5945
5946 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5947 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5948
5949 /* Note that we need not handle conditional operations here since fold
5950 already handles those cases. So just do arithmetic here. */
5951 switch (tcode)
5952 {
5953 case INTEGER_CST:
5954 /* For a constant, we can always simplify if we are a multiply
5955 or (for divide and modulus) if it is a multiple of our constant. */
5956 if (code == MULT_EXPR
5957 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5958 return const_binop (code, fold_convert (ctype, t),
5959 fold_convert (ctype, c));
5960 break;
5961
5962 CASE_CONVERT: case NON_LVALUE_EXPR:
5963 /* If op0 is an expression ... */
5964 if ((COMPARISON_CLASS_P (op0)
5965 || UNARY_CLASS_P (op0)
5966 || BINARY_CLASS_P (op0)
5967 || VL_EXP_CLASS_P (op0)
5968 || EXPRESSION_CLASS_P (op0))
5969 /* ... and has wrapping overflow, and its type is smaller
5970 than ctype, then we cannot pass through as widening. */
5971 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5972 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5973 && (TYPE_PRECISION (ctype)
5974 > TYPE_PRECISION (TREE_TYPE (op0))))
5975 /* ... or this is a truncation (t is narrower than op0),
5976 then we cannot pass through this narrowing. */
5977 || (TYPE_PRECISION (type)
5978 < TYPE_PRECISION (TREE_TYPE (op0)))
5979 /* ... or signedness changes for division or modulus,
5980 then we cannot pass through this conversion. */
5981 || (code != MULT_EXPR
5982 && (TYPE_UNSIGNED (ctype)
5983 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5984 /* ... or has undefined overflow while the converted to
5985 type has not, we cannot do the operation in the inner type
5986 as that would introduce undefined overflow. */
5987 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5988 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
5989 && !TYPE_OVERFLOW_UNDEFINED (type))))
5990 break;
5991
5992 /* Pass the constant down and see if we can make a simplification. If
5993 we can, replace this expression with the inner simplification for
5994 possible later conversion to our or some other type. */
5995 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5996 && TREE_CODE (t2) == INTEGER_CST
5997 && !TREE_OVERFLOW (t2)
5998 && (0 != (t1 = extract_muldiv (op0, t2, code,
5999 code == MULT_EXPR
6000 ? ctype : NULL_TREE,
6001 strict_overflow_p))))
6002 return t1;
6003 break;
6004
6005 case ABS_EXPR:
6006 /* If widening the type changes it from signed to unsigned, then we
6007 must avoid building ABS_EXPR itself as unsigned. */
6008 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6009 {
6010 tree cstype = (*signed_type_for) (ctype);
6011 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6012 != 0)
6013 {
6014 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6015 return fold_convert (ctype, t1);
6016 }
6017 break;
6018 }
6019 /* If the constant is negative, we cannot simplify this. */
6020 if (tree_int_cst_sgn (c) == -1)
6021 break;
6022 /* FALLTHROUGH */
6023 case NEGATE_EXPR:
6024 /* For division and modulus, type can't be unsigned, as e.g.
6025 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6026 For signed types, even with wrapping overflow, this is fine. */
6027 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6028 break;
6029 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6030 != 0)
6031 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6032 break;
6033
6034 case MIN_EXPR: case MAX_EXPR:
6035 /* If widening the type changes the signedness, then we can't perform
6036 this optimization as that changes the result. */
6037 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6038 break;
6039
6040 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6041 sub_strict_overflow_p = false;
6042 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6043 &sub_strict_overflow_p)) != 0
6044 && (t2 = extract_muldiv (op1, c, code, wide_type,
6045 &sub_strict_overflow_p)) != 0)
6046 {
6047 if (tree_int_cst_sgn (c) < 0)
6048 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6049 if (sub_strict_overflow_p)
6050 *strict_overflow_p = true;
6051 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6052 fold_convert (ctype, t2));
6053 }
6054 break;
6055
6056 case LSHIFT_EXPR: case RSHIFT_EXPR:
6057 /* If the second operand is constant, this is a multiplication
6058 or floor division, by a power of two, so we can treat it that
6059 way unless the multiplier or divisor overflows. Signed
6060 left-shift overflow is implementation-defined rather than
6061 undefined in C90, so do not convert signed left shift into
6062 multiplication. */
6063 if (TREE_CODE (op1) == INTEGER_CST
6064 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6065 /* const_binop may not detect overflow correctly,
6066 so check for it explicitly here. */
6067 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6068 && 0 != (t1 = fold_convert (ctype,
6069 const_binop (LSHIFT_EXPR,
6070 size_one_node,
6071 op1)))
6072 && !TREE_OVERFLOW (t1))
6073 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6074 ? MULT_EXPR : FLOOR_DIV_EXPR,
6075 ctype,
6076 fold_convert (ctype, op0),
6077 t1),
6078 c, code, wide_type, strict_overflow_p);
6079 break;
6080
6081 case PLUS_EXPR: case MINUS_EXPR:
6082 /* See if we can eliminate the operation on both sides. If we can, we
6083 can return a new PLUS or MINUS. If we can't, the only remaining
6084 cases where we can do anything are if the second operand is a
6085 constant. */
6086 sub_strict_overflow_p = false;
6087 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6088 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6089 if (t1 != 0 && t2 != 0
6090 && (code == MULT_EXPR
6091 /* If not multiplication, we can only do this if both operands
6092 are divisible by c. */
6093 || (multiple_of_p (ctype, op0, c)
6094 && multiple_of_p (ctype, op1, c))))
6095 {
6096 if (sub_strict_overflow_p)
6097 *strict_overflow_p = true;
6098 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6099 fold_convert (ctype, t2));
6100 }
6101
6102 /* If this was a subtraction, negate OP1 and set it to be an addition.
6103 This simplifies the logic below. */
6104 if (tcode == MINUS_EXPR)
6105 {
6106 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6107 /* If OP1 was not easily negatable, the constant may be OP0. */
6108 if (TREE_CODE (op0) == INTEGER_CST)
6109 {
6110 tree tem = op0;
6111 op0 = op1;
6112 op1 = tem;
6113 tem = t1;
6114 t1 = t2;
6115 t2 = tem;
6116 }
6117 }
6118
6119 if (TREE_CODE (op1) != INTEGER_CST)
6120 break;
6121
6122 /* If either OP1 or C are negative, this optimization is not safe for
6123 some of the division and remainder types while for others we need
6124 to change the code. */
6125 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6126 {
6127 if (code == CEIL_DIV_EXPR)
6128 code = FLOOR_DIV_EXPR;
6129 else if (code == FLOOR_DIV_EXPR)
6130 code = CEIL_DIV_EXPR;
6131 else if (code != MULT_EXPR
6132 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6133 break;
6134 }
6135
6136 /* If it's a multiply or a division/modulus operation of a multiple
6137 of our constant, do the operation and verify it doesn't overflow. */
6138 if (code == MULT_EXPR
6139 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6140 {
6141 op1 = const_binop (code, fold_convert (ctype, op1),
6142 fold_convert (ctype, c));
6143 /* We allow the constant to overflow with wrapping semantics. */
6144 if (op1 == 0
6145 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6146 break;
6147 }
6148 else
6149 break;
6150
6151 /* If we have an unsigned type, we cannot widen the operation since it
6152 will change the result if the original computation overflowed. */
6153 if (TYPE_UNSIGNED (ctype) && ctype != type)
6154 break;
6155
6156 /* If we were able to eliminate our operation from the first side,
6157 apply our operation to the second side and reform the PLUS. */
6158 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6159 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6160
6161 /* The last case is if we are a multiply. In that case, we can
6162 apply the distributive law to commute the multiply and addition
6163 if the multiplication of the constants doesn't overflow
6164 and overflow is defined. With undefined overflow
6165 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6166 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6167 return fold_build2 (tcode, ctype,
6168 fold_build2 (code, ctype,
6169 fold_convert (ctype, op0),
6170 fold_convert (ctype, c)),
6171 op1);
6172
6173 break;
6174
6175 case MULT_EXPR:
6176 /* We have a special case here if we are doing something like
6177 (C * 8) % 4 since we know that's zero. */
6178 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6179 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6180 /* If the multiplication can overflow we cannot optimize this. */
6181 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6182 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6183 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6184 {
6185 *strict_overflow_p = true;
6186 return omit_one_operand (type, integer_zero_node, op0);
6187 }
6188
6189 /* ... fall through ... */
6190
6191 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6192 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6193 /* If we can extract our operation from the LHS, do so and return a
6194 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6195 do something only if the second operand is a constant. */
6196 if (same_p
6197 && (t1 = extract_muldiv (op0, c, code, wide_type,
6198 strict_overflow_p)) != 0)
6199 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6200 fold_convert (ctype, op1));
6201 else if (tcode == MULT_EXPR && code == MULT_EXPR
6202 && (t1 = extract_muldiv (op1, c, code, wide_type,
6203 strict_overflow_p)) != 0)
6204 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6205 fold_convert (ctype, t1));
6206 else if (TREE_CODE (op1) != INTEGER_CST)
6207 return 0;
6208
6209 /* If these are the same operation types, we can associate them
6210 assuming no overflow. */
6211 if (tcode == code)
6212 {
6213 bool overflow_p = false;
6214 bool overflow_mul_p;
6215 signop sign = TYPE_SIGN (ctype);
6216 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6217 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6218 if (overflow_mul_p
6219 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6220 overflow_p = true;
6221 if (!overflow_p)
6222 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6223 wide_int_to_tree (ctype, mul));
6224 }
6225
6226 /* If these operations "cancel" each other, we have the main
6227 optimizations of this pass, which occur when either constant is a
6228 multiple of the other, in which case we replace this with either an
6229 operation or CODE or TCODE.
6230
6231 If we have an unsigned type, we cannot do this since it will change
6232 the result if the original computation overflowed. */
6233 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6234 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6235 || (tcode == MULT_EXPR
6236 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6237 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6238 && code != MULT_EXPR)))
6239 {
6240 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6241 {
6242 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6243 *strict_overflow_p = true;
6244 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6245 fold_convert (ctype,
6246 const_binop (TRUNC_DIV_EXPR,
6247 op1, c)));
6248 }
6249 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6250 {
6251 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6252 *strict_overflow_p = true;
6253 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6254 fold_convert (ctype,
6255 const_binop (TRUNC_DIV_EXPR,
6256 c, op1)));
6257 }
6258 }
6259 break;
6260
6261 default:
6262 break;
6263 }
6264
6265 return 0;
6266 }
6267 \f
6268 /* Return a node which has the indicated constant VALUE (either 0 or
6269 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6270 and is of the indicated TYPE. */
6271
6272 tree
6273 constant_boolean_node (bool value, tree type)
6274 {
6275 if (type == integer_type_node)
6276 return value ? integer_one_node : integer_zero_node;
6277 else if (type == boolean_type_node)
6278 return value ? boolean_true_node : boolean_false_node;
6279 else if (TREE_CODE (type) == VECTOR_TYPE)
6280 return build_vector_from_val (type,
6281 build_int_cst (TREE_TYPE (type),
6282 value ? -1 : 0));
6283 else
6284 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6285 }
6286
6287
6288 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6289 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6290 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6291 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6292 COND is the first argument to CODE; otherwise (as in the example
6293 given here), it is the second argument. TYPE is the type of the
6294 original expression. Return NULL_TREE if no simplification is
6295 possible. */
6296
6297 static tree
6298 fold_binary_op_with_conditional_arg (location_t loc,
6299 enum tree_code code,
6300 tree type, tree op0, tree op1,
6301 tree cond, tree arg, int cond_first_p)
6302 {
6303 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6304 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6305 tree test, true_value, false_value;
6306 tree lhs = NULL_TREE;
6307 tree rhs = NULL_TREE;
6308 enum tree_code cond_code = COND_EXPR;
6309
6310 if (TREE_CODE (cond) == COND_EXPR
6311 || TREE_CODE (cond) == VEC_COND_EXPR)
6312 {
6313 test = TREE_OPERAND (cond, 0);
6314 true_value = TREE_OPERAND (cond, 1);
6315 false_value = TREE_OPERAND (cond, 2);
6316 /* If this operand throws an expression, then it does not make
6317 sense to try to perform a logical or arithmetic operation
6318 involving it. */
6319 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6320 lhs = true_value;
6321 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6322 rhs = false_value;
6323 }
6324 else
6325 {
6326 tree testtype = TREE_TYPE (cond);
6327 test = cond;
6328 true_value = constant_boolean_node (true, testtype);
6329 false_value = constant_boolean_node (false, testtype);
6330 }
6331
6332 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6333 cond_code = VEC_COND_EXPR;
6334
6335 /* This transformation is only worthwhile if we don't have to wrap ARG
6336 in a SAVE_EXPR and the operation can be simplified without recursing
6337 on at least one of the branches once its pushed inside the COND_EXPR. */
6338 if (!TREE_CONSTANT (arg)
6339 && (TREE_SIDE_EFFECTS (arg)
6340 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6341 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6342 return NULL_TREE;
6343
6344 arg = fold_convert_loc (loc, arg_type, arg);
6345 if (lhs == 0)
6346 {
6347 true_value = fold_convert_loc (loc, cond_type, true_value);
6348 if (cond_first_p)
6349 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6350 else
6351 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6352 }
6353 if (rhs == 0)
6354 {
6355 false_value = fold_convert_loc (loc, cond_type, false_value);
6356 if (cond_first_p)
6357 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6358 else
6359 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6360 }
6361
6362 /* Check that we have simplified at least one of the branches. */
6363 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6364 return NULL_TREE;
6365
6366 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6367 }
6368
6369 \f
6370 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6371
6372 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6373 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6374 ADDEND is the same as X.
6375
6376 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6377 and finite. The problematic cases are when X is zero, and its mode
6378 has signed zeros. In the case of rounding towards -infinity,
6379 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6380 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6381
6382 bool
6383 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6384 {
6385 if (!real_zerop (addend))
6386 return false;
6387
6388 /* Don't allow the fold with -fsignaling-nans. */
6389 if (HONOR_SNANS (element_mode (type)))
6390 return false;
6391
6392 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6393 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6394 return true;
6395
6396 /* In a vector or complex, we would need to check the sign of all zeros. */
6397 if (TREE_CODE (addend) != REAL_CST)
6398 return false;
6399
6400 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6401 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6402 negate = !negate;
6403
6404 /* The mode has signed zeros, and we have to honor their sign.
6405 In this situation, there is only one case we can return true for.
6406 X - 0 is the same as X unless rounding towards -infinity is
6407 supported. */
6408 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6409 }
6410
6411 /* Subroutine of fold() that checks comparisons of built-in math
6412 functions against real constants.
6413
6414 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6415 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6416 is the type of the result and ARG0 and ARG1 are the operands of the
6417 comparison. ARG1 must be a TREE_REAL_CST.
6418
6419 The function returns the constant folded tree if a simplification
6420 can be made, and NULL_TREE otherwise. */
6421
6422 static tree
6423 fold_mathfn_compare (location_t loc,
6424 enum built_in_function fcode, enum tree_code code,
6425 tree type, tree arg0, tree arg1)
6426 {
6427 REAL_VALUE_TYPE c;
6428
6429 if (BUILTIN_SQRT_P (fcode))
6430 {
6431 tree arg = CALL_EXPR_ARG (arg0, 0);
6432 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6433
6434 c = TREE_REAL_CST (arg1);
6435 if (REAL_VALUE_NEGATIVE (c))
6436 {
6437 /* sqrt(x) < y is always false, if y is negative. */
6438 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6439 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6440
6441 /* sqrt(x) > y is always true, if y is negative and we
6442 don't care about NaNs, i.e. negative values of x. */
6443 if (code == NE_EXPR || !HONOR_NANS (mode))
6444 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6445
6446 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6447 return fold_build2_loc (loc, GE_EXPR, type, arg,
6448 build_real (TREE_TYPE (arg), dconst0));
6449 }
6450 else if (code == GT_EXPR || code == GE_EXPR)
6451 {
6452 REAL_VALUE_TYPE c2;
6453
6454 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6455 real_convert (&c2, mode, &c2);
6456
6457 if (REAL_VALUE_ISINF (c2))
6458 {
6459 /* sqrt(x) > y is x == +Inf, when y is very large. */
6460 if (HONOR_INFINITIES (mode))
6461 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6462 build_real (TREE_TYPE (arg), c2));
6463
6464 /* sqrt(x) > y is always false, when y is very large
6465 and we don't care about infinities. */
6466 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6467 }
6468
6469 /* sqrt(x) > c is the same as x > c*c. */
6470 return fold_build2_loc (loc, code, type, arg,
6471 build_real (TREE_TYPE (arg), c2));
6472 }
6473 else if (code == LT_EXPR || code == LE_EXPR)
6474 {
6475 REAL_VALUE_TYPE c2;
6476
6477 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6478 real_convert (&c2, mode, &c2);
6479
6480 if (REAL_VALUE_ISINF (c2))
6481 {
6482 /* sqrt(x) < y is always true, when y is a very large
6483 value and we don't care about NaNs or Infinities. */
6484 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6485 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6486
6487 /* sqrt(x) < y is x != +Inf when y is very large and we
6488 don't care about NaNs. */
6489 if (! HONOR_NANS (mode))
6490 return fold_build2_loc (loc, NE_EXPR, type, arg,
6491 build_real (TREE_TYPE (arg), c2));
6492
6493 /* sqrt(x) < y is x >= 0 when y is very large and we
6494 don't care about Infinities. */
6495 if (! HONOR_INFINITIES (mode))
6496 return fold_build2_loc (loc, GE_EXPR, type, arg,
6497 build_real (TREE_TYPE (arg), dconst0));
6498
6499 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6500 arg = save_expr (arg);
6501 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6502 fold_build2_loc (loc, GE_EXPR, type, arg,
6503 build_real (TREE_TYPE (arg),
6504 dconst0)),
6505 fold_build2_loc (loc, NE_EXPR, type, arg,
6506 build_real (TREE_TYPE (arg),
6507 c2)));
6508 }
6509
6510 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6511 if (! HONOR_NANS (mode))
6512 return fold_build2_loc (loc, code, type, arg,
6513 build_real (TREE_TYPE (arg), c2));
6514
6515 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6516 arg = save_expr (arg);
6517 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6518 fold_build2_loc (loc, GE_EXPR, type, arg,
6519 build_real (TREE_TYPE (arg),
6520 dconst0)),
6521 fold_build2_loc (loc, code, type, arg,
6522 build_real (TREE_TYPE (arg),
6523 c2)));
6524 }
6525 }
6526
6527 return NULL_TREE;
6528 }
6529
6530 /* Subroutine of fold() that optimizes comparisons against Infinities,
6531 either +Inf or -Inf.
6532
6533 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6534 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6535 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6536
6537 The function returns the constant folded tree if a simplification
6538 can be made, and NULL_TREE otherwise. */
6539
6540 static tree
6541 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6542 tree arg0, tree arg1)
6543 {
6544 machine_mode mode;
6545 REAL_VALUE_TYPE max;
6546 tree temp;
6547 bool neg;
6548
6549 mode = TYPE_MODE (TREE_TYPE (arg0));
6550
6551 /* For negative infinity swap the sense of the comparison. */
6552 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6553 if (neg)
6554 code = swap_tree_comparison (code);
6555
6556 switch (code)
6557 {
6558 case GT_EXPR:
6559 /* x > +Inf is always false, if with ignore sNANs. */
6560 if (HONOR_SNANS (mode))
6561 return NULL_TREE;
6562 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6563
6564 case LE_EXPR:
6565 /* x <= +Inf is always true, if we don't case about NaNs. */
6566 if (! HONOR_NANS (mode))
6567 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6568
6569 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6570 arg0 = save_expr (arg0);
6571 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6572
6573 case EQ_EXPR:
6574 case GE_EXPR:
6575 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6576 real_maxval (&max, neg, mode);
6577 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6578 arg0, build_real (TREE_TYPE (arg0), max));
6579
6580 case LT_EXPR:
6581 /* x < +Inf is always equal to x <= DBL_MAX. */
6582 real_maxval (&max, neg, mode);
6583 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6584 arg0, build_real (TREE_TYPE (arg0), max));
6585
6586 case NE_EXPR:
6587 /* x != +Inf is always equal to !(x > DBL_MAX). */
6588 real_maxval (&max, neg, mode);
6589 if (! HONOR_NANS (mode))
6590 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6591 arg0, build_real (TREE_TYPE (arg0), max));
6592
6593 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6594 arg0, build_real (TREE_TYPE (arg0), max));
6595 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6596
6597 default:
6598 break;
6599 }
6600
6601 return NULL_TREE;
6602 }
6603
6604 /* Subroutine of fold() that optimizes comparisons of a division by
6605 a nonzero integer constant against an integer constant, i.e.
6606 X/C1 op C2.
6607
6608 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6609 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6610 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6611
6612 The function returns the constant folded tree if a simplification
6613 can be made, and NULL_TREE otherwise. */
6614
6615 static tree
6616 fold_div_compare (location_t loc,
6617 enum tree_code code, tree type, tree arg0, tree arg1)
6618 {
6619 tree prod, tmp, hi, lo;
6620 tree arg00 = TREE_OPERAND (arg0, 0);
6621 tree arg01 = TREE_OPERAND (arg0, 1);
6622 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6623 bool neg_overflow = false;
6624 bool overflow;
6625
6626 /* We have to do this the hard way to detect unsigned overflow.
6627 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6628 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6629 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6630 neg_overflow = false;
6631
6632 if (sign == UNSIGNED)
6633 {
6634 tmp = int_const_binop (MINUS_EXPR, arg01,
6635 build_int_cst (TREE_TYPE (arg01), 1));
6636 lo = prod;
6637
6638 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6639 val = wi::add (prod, tmp, sign, &overflow);
6640 hi = force_fit_type (TREE_TYPE (arg00), val,
6641 -1, overflow | TREE_OVERFLOW (prod));
6642 }
6643 else if (tree_int_cst_sgn (arg01) >= 0)
6644 {
6645 tmp = int_const_binop (MINUS_EXPR, arg01,
6646 build_int_cst (TREE_TYPE (arg01), 1));
6647 switch (tree_int_cst_sgn (arg1))
6648 {
6649 case -1:
6650 neg_overflow = true;
6651 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6652 hi = prod;
6653 break;
6654
6655 case 0:
6656 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6657 hi = tmp;
6658 break;
6659
6660 case 1:
6661 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6662 lo = prod;
6663 break;
6664
6665 default:
6666 gcc_unreachable ();
6667 }
6668 }
6669 else
6670 {
6671 /* A negative divisor reverses the relational operators. */
6672 code = swap_tree_comparison (code);
6673
6674 tmp = int_const_binop (PLUS_EXPR, arg01,
6675 build_int_cst (TREE_TYPE (arg01), 1));
6676 switch (tree_int_cst_sgn (arg1))
6677 {
6678 case -1:
6679 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6680 lo = prod;
6681 break;
6682
6683 case 0:
6684 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6685 lo = tmp;
6686 break;
6687
6688 case 1:
6689 neg_overflow = true;
6690 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6691 hi = prod;
6692 break;
6693
6694 default:
6695 gcc_unreachable ();
6696 }
6697 }
6698
6699 switch (code)
6700 {
6701 case EQ_EXPR:
6702 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6703 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6704 if (TREE_OVERFLOW (hi))
6705 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6706 if (TREE_OVERFLOW (lo))
6707 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6708 return build_range_check (loc, type, arg00, 1, lo, hi);
6709
6710 case NE_EXPR:
6711 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6712 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6713 if (TREE_OVERFLOW (hi))
6714 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6715 if (TREE_OVERFLOW (lo))
6716 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6717 return build_range_check (loc, type, arg00, 0, lo, hi);
6718
6719 case LT_EXPR:
6720 if (TREE_OVERFLOW (lo))
6721 {
6722 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6723 return omit_one_operand_loc (loc, type, tmp, arg00);
6724 }
6725 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6726
6727 case LE_EXPR:
6728 if (TREE_OVERFLOW (hi))
6729 {
6730 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6731 return omit_one_operand_loc (loc, type, tmp, arg00);
6732 }
6733 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6734
6735 case GT_EXPR:
6736 if (TREE_OVERFLOW (hi))
6737 {
6738 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6739 return omit_one_operand_loc (loc, type, tmp, arg00);
6740 }
6741 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6742
6743 case GE_EXPR:
6744 if (TREE_OVERFLOW (lo))
6745 {
6746 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6747 return omit_one_operand_loc (loc, type, tmp, arg00);
6748 }
6749 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6750
6751 default:
6752 break;
6753 }
6754
6755 return NULL_TREE;
6756 }
6757
6758
6759 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6760 equality/inequality test, then return a simplified form of the test
6761 using a sign testing. Otherwise return NULL. TYPE is the desired
6762 result type. */
6763
6764 static tree
6765 fold_single_bit_test_into_sign_test (location_t loc,
6766 enum tree_code code, tree arg0, tree arg1,
6767 tree result_type)
6768 {
6769 /* If this is testing a single bit, we can optimize the test. */
6770 if ((code == NE_EXPR || code == EQ_EXPR)
6771 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6772 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6773 {
6774 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6775 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6776 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6777
6778 if (arg00 != NULL_TREE
6779 /* This is only a win if casting to a signed type is cheap,
6780 i.e. when arg00's type is not a partial mode. */
6781 && TYPE_PRECISION (TREE_TYPE (arg00))
6782 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6783 {
6784 tree stype = signed_type_for (TREE_TYPE (arg00));
6785 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6786 result_type,
6787 fold_convert_loc (loc, stype, arg00),
6788 build_int_cst (stype, 0));
6789 }
6790 }
6791
6792 return NULL_TREE;
6793 }
6794
6795 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6796 equality/inequality test, then return a simplified form of
6797 the test using shifts and logical operations. Otherwise return
6798 NULL. TYPE is the desired result type. */
6799
6800 tree
6801 fold_single_bit_test (location_t loc, enum tree_code code,
6802 tree arg0, tree arg1, tree result_type)
6803 {
6804 /* If this is testing a single bit, we can optimize the test. */
6805 if ((code == NE_EXPR || code == EQ_EXPR)
6806 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6807 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6808 {
6809 tree inner = TREE_OPERAND (arg0, 0);
6810 tree type = TREE_TYPE (arg0);
6811 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6812 machine_mode operand_mode = TYPE_MODE (type);
6813 int ops_unsigned;
6814 tree signed_type, unsigned_type, intermediate_type;
6815 tree tem, one;
6816
6817 /* First, see if we can fold the single bit test into a sign-bit
6818 test. */
6819 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6820 result_type);
6821 if (tem)
6822 return tem;
6823
6824 /* Otherwise we have (A & C) != 0 where C is a single bit,
6825 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6826 Similarly for (A & C) == 0. */
6827
6828 /* If INNER is a right shift of a constant and it plus BITNUM does
6829 not overflow, adjust BITNUM and INNER. */
6830 if (TREE_CODE (inner) == RSHIFT_EXPR
6831 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6832 && bitnum < TYPE_PRECISION (type)
6833 && wi::ltu_p (TREE_OPERAND (inner, 1),
6834 TYPE_PRECISION (type) - bitnum))
6835 {
6836 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6837 inner = TREE_OPERAND (inner, 0);
6838 }
6839
6840 /* If we are going to be able to omit the AND below, we must do our
6841 operations as unsigned. If we must use the AND, we have a choice.
6842 Normally unsigned is faster, but for some machines signed is. */
6843 #ifdef LOAD_EXTEND_OP
6844 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6845 && !flag_syntax_only) ? 0 : 1;
6846 #else
6847 ops_unsigned = 1;
6848 #endif
6849
6850 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6851 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6852 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6853 inner = fold_convert_loc (loc, intermediate_type, inner);
6854
6855 if (bitnum != 0)
6856 inner = build2 (RSHIFT_EXPR, intermediate_type,
6857 inner, size_int (bitnum));
6858
6859 one = build_int_cst (intermediate_type, 1);
6860
6861 if (code == EQ_EXPR)
6862 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6863
6864 /* Put the AND last so it can combine with more things. */
6865 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6866
6867 /* Make sure to return the proper type. */
6868 inner = fold_convert_loc (loc, result_type, inner);
6869
6870 return inner;
6871 }
6872 return NULL_TREE;
6873 }
6874
6875 /* Check whether we are allowed to reorder operands arg0 and arg1,
6876 such that the evaluation of arg1 occurs before arg0. */
6877
6878 static bool
6879 reorder_operands_p (const_tree arg0, const_tree arg1)
6880 {
6881 if (! flag_evaluation_order)
6882 return true;
6883 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6884 return true;
6885 return ! TREE_SIDE_EFFECTS (arg0)
6886 && ! TREE_SIDE_EFFECTS (arg1);
6887 }
6888
6889 /* Test whether it is preferable two swap two operands, ARG0 and
6890 ARG1, for example because ARG0 is an integer constant and ARG1
6891 isn't. If REORDER is true, only recommend swapping if we can
6892 evaluate the operands in reverse order. */
6893
6894 bool
6895 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6896 {
6897 if (CONSTANT_CLASS_P (arg1))
6898 return 0;
6899 if (CONSTANT_CLASS_P (arg0))
6900 return 1;
6901
6902 STRIP_NOPS (arg0);
6903 STRIP_NOPS (arg1);
6904
6905 if (TREE_CONSTANT (arg1))
6906 return 0;
6907 if (TREE_CONSTANT (arg0))
6908 return 1;
6909
6910 if (reorder && flag_evaluation_order
6911 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6912 return 0;
6913
6914 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6915 for commutative and comparison operators. Ensuring a canonical
6916 form allows the optimizers to find additional redundancies without
6917 having to explicitly check for both orderings. */
6918 if (TREE_CODE (arg0) == SSA_NAME
6919 && TREE_CODE (arg1) == SSA_NAME
6920 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6921 return 1;
6922
6923 /* Put SSA_NAMEs last. */
6924 if (TREE_CODE (arg1) == SSA_NAME)
6925 return 0;
6926 if (TREE_CODE (arg0) == SSA_NAME)
6927 return 1;
6928
6929 /* Put variables last. */
6930 if (DECL_P (arg1))
6931 return 0;
6932 if (DECL_P (arg0))
6933 return 1;
6934
6935 return 0;
6936 }
6937
6938 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6939 ARG0 is extended to a wider type. */
6940
6941 static tree
6942 fold_widened_comparison (location_t loc, enum tree_code code,
6943 tree type, tree arg0, tree arg1)
6944 {
6945 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6946 tree arg1_unw;
6947 tree shorter_type, outer_type;
6948 tree min, max;
6949 bool above, below;
6950
6951 if (arg0_unw == arg0)
6952 return NULL_TREE;
6953 shorter_type = TREE_TYPE (arg0_unw);
6954
6955 #ifdef HAVE_canonicalize_funcptr_for_compare
6956 /* Disable this optimization if we're casting a function pointer
6957 type on targets that require function pointer canonicalization. */
6958 if (HAVE_canonicalize_funcptr_for_compare
6959 && TREE_CODE (shorter_type) == POINTER_TYPE
6960 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6961 return NULL_TREE;
6962 #endif
6963
6964 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6965 return NULL_TREE;
6966
6967 arg1_unw = get_unwidened (arg1, NULL_TREE);
6968
6969 /* If possible, express the comparison in the shorter mode. */
6970 if ((code == EQ_EXPR || code == NE_EXPR
6971 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6972 && (TREE_TYPE (arg1_unw) == shorter_type
6973 || ((TYPE_PRECISION (shorter_type)
6974 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6975 && (TYPE_UNSIGNED (shorter_type)
6976 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6977 || (TREE_CODE (arg1_unw) == INTEGER_CST
6978 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6979 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6980 && int_fits_type_p (arg1_unw, shorter_type))))
6981 return fold_build2_loc (loc, code, type, arg0_unw,
6982 fold_convert_loc (loc, shorter_type, arg1_unw));
6983
6984 if (TREE_CODE (arg1_unw) != INTEGER_CST
6985 || TREE_CODE (shorter_type) != INTEGER_TYPE
6986 || !int_fits_type_p (arg1_unw, shorter_type))
6987 return NULL_TREE;
6988
6989 /* If we are comparing with the integer that does not fit into the range
6990 of the shorter type, the result is known. */
6991 outer_type = TREE_TYPE (arg1_unw);
6992 min = lower_bound_in_type (outer_type, shorter_type);
6993 max = upper_bound_in_type (outer_type, shorter_type);
6994
6995 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6996 max, arg1_unw));
6997 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6998 arg1_unw, min));
6999
7000 switch (code)
7001 {
7002 case EQ_EXPR:
7003 if (above || below)
7004 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7005 break;
7006
7007 case NE_EXPR:
7008 if (above || below)
7009 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7010 break;
7011
7012 case LT_EXPR:
7013 case LE_EXPR:
7014 if (above)
7015 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7016 else if (below)
7017 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7018
7019 case GT_EXPR:
7020 case GE_EXPR:
7021 if (above)
7022 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7023 else if (below)
7024 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7025
7026 default:
7027 break;
7028 }
7029
7030 return NULL_TREE;
7031 }
7032
7033 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7034 ARG0 just the signedness is changed. */
7035
7036 static tree
7037 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7038 tree arg0, tree arg1)
7039 {
7040 tree arg0_inner;
7041 tree inner_type, outer_type;
7042
7043 if (!CONVERT_EXPR_P (arg0))
7044 return NULL_TREE;
7045
7046 outer_type = TREE_TYPE (arg0);
7047 arg0_inner = TREE_OPERAND (arg0, 0);
7048 inner_type = TREE_TYPE (arg0_inner);
7049
7050 #ifdef HAVE_canonicalize_funcptr_for_compare
7051 /* Disable this optimization if we're casting a function pointer
7052 type on targets that require function pointer canonicalization. */
7053 if (HAVE_canonicalize_funcptr_for_compare
7054 && TREE_CODE (inner_type) == POINTER_TYPE
7055 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7056 return NULL_TREE;
7057 #endif
7058
7059 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7060 return NULL_TREE;
7061
7062 if (TREE_CODE (arg1) != INTEGER_CST
7063 && !(CONVERT_EXPR_P (arg1)
7064 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7065 return NULL_TREE;
7066
7067 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7068 && code != NE_EXPR
7069 && code != EQ_EXPR)
7070 return NULL_TREE;
7071
7072 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7073 return NULL_TREE;
7074
7075 if (TREE_CODE (arg1) == INTEGER_CST)
7076 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7077 TREE_OVERFLOW (arg1));
7078 else
7079 arg1 = fold_convert_loc (loc, inner_type, arg1);
7080
7081 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7082 }
7083
7084
7085 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7086 means A >= Y && A != MAX, but in this case we know that
7087 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7088
7089 static tree
7090 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7091 {
7092 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7093
7094 if (TREE_CODE (bound) == LT_EXPR)
7095 a = TREE_OPERAND (bound, 0);
7096 else if (TREE_CODE (bound) == GT_EXPR)
7097 a = TREE_OPERAND (bound, 1);
7098 else
7099 return NULL_TREE;
7100
7101 typea = TREE_TYPE (a);
7102 if (!INTEGRAL_TYPE_P (typea)
7103 && !POINTER_TYPE_P (typea))
7104 return NULL_TREE;
7105
7106 if (TREE_CODE (ineq) == LT_EXPR)
7107 {
7108 a1 = TREE_OPERAND (ineq, 1);
7109 y = TREE_OPERAND (ineq, 0);
7110 }
7111 else if (TREE_CODE (ineq) == GT_EXPR)
7112 {
7113 a1 = TREE_OPERAND (ineq, 0);
7114 y = TREE_OPERAND (ineq, 1);
7115 }
7116 else
7117 return NULL_TREE;
7118
7119 if (TREE_TYPE (a1) != typea)
7120 return NULL_TREE;
7121
7122 if (POINTER_TYPE_P (typea))
7123 {
7124 /* Convert the pointer types into integer before taking the difference. */
7125 tree ta = fold_convert_loc (loc, ssizetype, a);
7126 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7127 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7128 }
7129 else
7130 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7131
7132 if (!diff || !integer_onep (diff))
7133 return NULL_TREE;
7134
7135 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7136 }
7137
7138 /* Fold a sum or difference of at least one multiplication.
7139 Returns the folded tree or NULL if no simplification could be made. */
7140
7141 static tree
7142 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7143 tree arg0, tree arg1)
7144 {
7145 tree arg00, arg01, arg10, arg11;
7146 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7147
7148 /* (A * C) +- (B * C) -> (A+-B) * C.
7149 (A * C) +- A -> A * (C+-1).
7150 We are most concerned about the case where C is a constant,
7151 but other combinations show up during loop reduction. Since
7152 it is not difficult, try all four possibilities. */
7153
7154 if (TREE_CODE (arg0) == MULT_EXPR)
7155 {
7156 arg00 = TREE_OPERAND (arg0, 0);
7157 arg01 = TREE_OPERAND (arg0, 1);
7158 }
7159 else if (TREE_CODE (arg0) == INTEGER_CST)
7160 {
7161 arg00 = build_one_cst (type);
7162 arg01 = arg0;
7163 }
7164 else
7165 {
7166 /* We cannot generate constant 1 for fract. */
7167 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7168 return NULL_TREE;
7169 arg00 = arg0;
7170 arg01 = build_one_cst (type);
7171 }
7172 if (TREE_CODE (arg1) == MULT_EXPR)
7173 {
7174 arg10 = TREE_OPERAND (arg1, 0);
7175 arg11 = TREE_OPERAND (arg1, 1);
7176 }
7177 else if (TREE_CODE (arg1) == INTEGER_CST)
7178 {
7179 arg10 = build_one_cst (type);
7180 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7181 the purpose of this canonicalization. */
7182 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7183 && negate_expr_p (arg1)
7184 && code == PLUS_EXPR)
7185 {
7186 arg11 = negate_expr (arg1);
7187 code = MINUS_EXPR;
7188 }
7189 else
7190 arg11 = arg1;
7191 }
7192 else
7193 {
7194 /* We cannot generate constant 1 for fract. */
7195 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7196 return NULL_TREE;
7197 arg10 = arg1;
7198 arg11 = build_one_cst (type);
7199 }
7200 same = NULL_TREE;
7201
7202 if (operand_equal_p (arg01, arg11, 0))
7203 same = arg01, alt0 = arg00, alt1 = arg10;
7204 else if (operand_equal_p (arg00, arg10, 0))
7205 same = arg00, alt0 = arg01, alt1 = arg11;
7206 else if (operand_equal_p (arg00, arg11, 0))
7207 same = arg00, alt0 = arg01, alt1 = arg10;
7208 else if (operand_equal_p (arg01, arg10, 0))
7209 same = arg01, alt0 = arg00, alt1 = arg11;
7210
7211 /* No identical multiplicands; see if we can find a common
7212 power-of-two factor in non-power-of-two multiplies. This
7213 can help in multi-dimensional array access. */
7214 else if (tree_fits_shwi_p (arg01)
7215 && tree_fits_shwi_p (arg11))
7216 {
7217 HOST_WIDE_INT int01, int11, tmp;
7218 bool swap = false;
7219 tree maybe_same;
7220 int01 = tree_to_shwi (arg01);
7221 int11 = tree_to_shwi (arg11);
7222
7223 /* Move min of absolute values to int11. */
7224 if (absu_hwi (int01) < absu_hwi (int11))
7225 {
7226 tmp = int01, int01 = int11, int11 = tmp;
7227 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7228 maybe_same = arg01;
7229 swap = true;
7230 }
7231 else
7232 maybe_same = arg11;
7233
7234 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7235 /* The remainder should not be a constant, otherwise we
7236 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7237 increased the number of multiplications necessary. */
7238 && TREE_CODE (arg10) != INTEGER_CST)
7239 {
7240 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7241 build_int_cst (TREE_TYPE (arg00),
7242 int01 / int11));
7243 alt1 = arg10;
7244 same = maybe_same;
7245 if (swap)
7246 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7247 }
7248 }
7249
7250 if (same)
7251 return fold_build2_loc (loc, MULT_EXPR, type,
7252 fold_build2_loc (loc, code, type,
7253 fold_convert_loc (loc, type, alt0),
7254 fold_convert_loc (loc, type, alt1)),
7255 fold_convert_loc (loc, type, same));
7256
7257 return NULL_TREE;
7258 }
7259
7260 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7261 specified by EXPR into the buffer PTR of length LEN bytes.
7262 Return the number of bytes placed in the buffer, or zero
7263 upon failure. */
7264
7265 static int
7266 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7267 {
7268 tree type = TREE_TYPE (expr);
7269 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7270 int byte, offset, word, words;
7271 unsigned char value;
7272
7273 if ((off == -1 && total_bytes > len)
7274 || off >= total_bytes)
7275 return 0;
7276 if (off == -1)
7277 off = 0;
7278 words = total_bytes / UNITS_PER_WORD;
7279
7280 for (byte = 0; byte < total_bytes; byte++)
7281 {
7282 int bitpos = byte * BITS_PER_UNIT;
7283 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7284 number of bytes. */
7285 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7286
7287 if (total_bytes > UNITS_PER_WORD)
7288 {
7289 word = byte / UNITS_PER_WORD;
7290 if (WORDS_BIG_ENDIAN)
7291 word = (words - 1) - word;
7292 offset = word * UNITS_PER_WORD;
7293 if (BYTES_BIG_ENDIAN)
7294 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7295 else
7296 offset += byte % UNITS_PER_WORD;
7297 }
7298 else
7299 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7300 if (offset >= off
7301 && offset - off < len)
7302 ptr[offset - off] = value;
7303 }
7304 return MIN (len, total_bytes - off);
7305 }
7306
7307
7308 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7309 specified by EXPR into the buffer PTR of length LEN bytes.
7310 Return the number of bytes placed in the buffer, or zero
7311 upon failure. */
7312
7313 static int
7314 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7315 {
7316 tree type = TREE_TYPE (expr);
7317 machine_mode mode = TYPE_MODE (type);
7318 int total_bytes = GET_MODE_SIZE (mode);
7319 FIXED_VALUE_TYPE value;
7320 tree i_value, i_type;
7321
7322 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7323 return 0;
7324
7325 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7326
7327 if (NULL_TREE == i_type
7328 || TYPE_PRECISION (i_type) != total_bytes)
7329 return 0;
7330
7331 value = TREE_FIXED_CST (expr);
7332 i_value = double_int_to_tree (i_type, value.data);
7333
7334 return native_encode_int (i_value, ptr, len, off);
7335 }
7336
7337
7338 /* Subroutine of native_encode_expr. Encode the REAL_CST
7339 specified by EXPR into the buffer PTR of length LEN bytes.
7340 Return the number of bytes placed in the buffer, or zero
7341 upon failure. */
7342
7343 static int
7344 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7345 {
7346 tree type = TREE_TYPE (expr);
7347 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7348 int byte, offset, word, words, bitpos;
7349 unsigned char value;
7350
7351 /* There are always 32 bits in each long, no matter the size of
7352 the hosts long. We handle floating point representations with
7353 up to 192 bits. */
7354 long tmp[6];
7355
7356 if ((off == -1 && total_bytes > len)
7357 || off >= total_bytes)
7358 return 0;
7359 if (off == -1)
7360 off = 0;
7361 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7362
7363 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7364
7365 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7366 bitpos += BITS_PER_UNIT)
7367 {
7368 byte = (bitpos / BITS_PER_UNIT) & 3;
7369 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7370
7371 if (UNITS_PER_WORD < 4)
7372 {
7373 word = byte / UNITS_PER_WORD;
7374 if (WORDS_BIG_ENDIAN)
7375 word = (words - 1) - word;
7376 offset = word * UNITS_PER_WORD;
7377 if (BYTES_BIG_ENDIAN)
7378 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7379 else
7380 offset += byte % UNITS_PER_WORD;
7381 }
7382 else
7383 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7384 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7385 if (offset >= off
7386 && offset - off < len)
7387 ptr[offset - off] = value;
7388 }
7389 return MIN (len, total_bytes - off);
7390 }
7391
7392 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7393 specified by EXPR into the buffer PTR of length LEN bytes.
7394 Return the number of bytes placed in the buffer, or zero
7395 upon failure. */
7396
7397 static int
7398 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7399 {
7400 int rsize, isize;
7401 tree part;
7402
7403 part = TREE_REALPART (expr);
7404 rsize = native_encode_expr (part, ptr, len, off);
7405 if (off == -1
7406 && rsize == 0)
7407 return 0;
7408 part = TREE_IMAGPART (expr);
7409 if (off != -1)
7410 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7411 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7412 if (off == -1
7413 && isize != rsize)
7414 return 0;
7415 return rsize + isize;
7416 }
7417
7418
7419 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7420 specified by EXPR into the buffer PTR of length LEN bytes.
7421 Return the number of bytes placed in the buffer, or zero
7422 upon failure. */
7423
7424 static int
7425 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7426 {
7427 unsigned i, count;
7428 int size, offset;
7429 tree itype, elem;
7430
7431 offset = 0;
7432 count = VECTOR_CST_NELTS (expr);
7433 itype = TREE_TYPE (TREE_TYPE (expr));
7434 size = GET_MODE_SIZE (TYPE_MODE (itype));
7435 for (i = 0; i < count; i++)
7436 {
7437 if (off >= size)
7438 {
7439 off -= size;
7440 continue;
7441 }
7442 elem = VECTOR_CST_ELT (expr, i);
7443 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7444 if ((off == -1 && res != size)
7445 || res == 0)
7446 return 0;
7447 offset += res;
7448 if (offset >= len)
7449 return offset;
7450 if (off != -1)
7451 off = 0;
7452 }
7453 return offset;
7454 }
7455
7456
7457 /* Subroutine of native_encode_expr. Encode the STRING_CST
7458 specified by EXPR into the buffer PTR of length LEN bytes.
7459 Return the number of bytes placed in the buffer, or zero
7460 upon failure. */
7461
7462 static int
7463 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7464 {
7465 tree type = TREE_TYPE (expr);
7466 HOST_WIDE_INT total_bytes;
7467
7468 if (TREE_CODE (type) != ARRAY_TYPE
7469 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7470 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7471 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7472 return 0;
7473 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7474 if ((off == -1 && total_bytes > len)
7475 || off >= total_bytes)
7476 return 0;
7477 if (off == -1)
7478 off = 0;
7479 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7480 {
7481 int written = 0;
7482 if (off < TREE_STRING_LENGTH (expr))
7483 {
7484 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7485 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7486 }
7487 memset (ptr + written, 0,
7488 MIN (total_bytes - written, len - written));
7489 }
7490 else
7491 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7492 return MIN (total_bytes - off, len);
7493 }
7494
7495
7496 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7497 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7498 buffer PTR of length LEN bytes. If OFF is not -1 then start
7499 the encoding at byte offset OFF and encode at most LEN bytes.
7500 Return the number of bytes placed in the buffer, or zero upon failure. */
7501
7502 int
7503 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7504 {
7505 switch (TREE_CODE (expr))
7506 {
7507 case INTEGER_CST:
7508 return native_encode_int (expr, ptr, len, off);
7509
7510 case REAL_CST:
7511 return native_encode_real (expr, ptr, len, off);
7512
7513 case FIXED_CST:
7514 return native_encode_fixed (expr, ptr, len, off);
7515
7516 case COMPLEX_CST:
7517 return native_encode_complex (expr, ptr, len, off);
7518
7519 case VECTOR_CST:
7520 return native_encode_vector (expr, ptr, len, off);
7521
7522 case STRING_CST:
7523 return native_encode_string (expr, ptr, len, off);
7524
7525 default:
7526 return 0;
7527 }
7528 }
7529
7530
7531 /* Subroutine of native_interpret_expr. Interpret the contents of
7532 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7533 If the buffer cannot be interpreted, return NULL_TREE. */
7534
7535 static tree
7536 native_interpret_int (tree type, const unsigned char *ptr, int len)
7537 {
7538 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7539
7540 if (total_bytes > len
7541 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7542 return NULL_TREE;
7543
7544 wide_int result = wi::from_buffer (ptr, total_bytes);
7545
7546 return wide_int_to_tree (type, result);
7547 }
7548
7549
7550 /* Subroutine of native_interpret_expr. Interpret the contents of
7551 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7552 If the buffer cannot be interpreted, return NULL_TREE. */
7553
7554 static tree
7555 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7556 {
7557 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7558 double_int result;
7559 FIXED_VALUE_TYPE fixed_value;
7560
7561 if (total_bytes > len
7562 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7563 return NULL_TREE;
7564
7565 result = double_int::from_buffer (ptr, total_bytes);
7566 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7567
7568 return build_fixed (type, fixed_value);
7569 }
7570
7571
7572 /* Subroutine of native_interpret_expr. Interpret the contents of
7573 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7574 If the buffer cannot be interpreted, return NULL_TREE. */
7575
7576 static tree
7577 native_interpret_real (tree type, const unsigned char *ptr, int len)
7578 {
7579 machine_mode mode = TYPE_MODE (type);
7580 int total_bytes = GET_MODE_SIZE (mode);
7581 int byte, offset, word, words, bitpos;
7582 unsigned char value;
7583 /* There are always 32 bits in each long, no matter the size of
7584 the hosts long. We handle floating point representations with
7585 up to 192 bits. */
7586 REAL_VALUE_TYPE r;
7587 long tmp[6];
7588
7589 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7590 if (total_bytes > len || total_bytes > 24)
7591 return NULL_TREE;
7592 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7593
7594 memset (tmp, 0, sizeof (tmp));
7595 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7596 bitpos += BITS_PER_UNIT)
7597 {
7598 byte = (bitpos / BITS_PER_UNIT) & 3;
7599 if (UNITS_PER_WORD < 4)
7600 {
7601 word = byte / UNITS_PER_WORD;
7602 if (WORDS_BIG_ENDIAN)
7603 word = (words - 1) - word;
7604 offset = word * UNITS_PER_WORD;
7605 if (BYTES_BIG_ENDIAN)
7606 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7607 else
7608 offset += byte % UNITS_PER_WORD;
7609 }
7610 else
7611 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7612 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7613
7614 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7615 }
7616
7617 real_from_target (&r, tmp, mode);
7618 return build_real (type, r);
7619 }
7620
7621
7622 /* Subroutine of native_interpret_expr. Interpret the contents of
7623 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7624 If the buffer cannot be interpreted, return NULL_TREE. */
7625
7626 static tree
7627 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7628 {
7629 tree etype, rpart, ipart;
7630 int size;
7631
7632 etype = TREE_TYPE (type);
7633 size = GET_MODE_SIZE (TYPE_MODE (etype));
7634 if (size * 2 > len)
7635 return NULL_TREE;
7636 rpart = native_interpret_expr (etype, ptr, size);
7637 if (!rpart)
7638 return NULL_TREE;
7639 ipart = native_interpret_expr (etype, ptr+size, size);
7640 if (!ipart)
7641 return NULL_TREE;
7642 return build_complex (type, rpart, ipart);
7643 }
7644
7645
7646 /* Subroutine of native_interpret_expr. Interpret the contents of
7647 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7648 If the buffer cannot be interpreted, return NULL_TREE. */
7649
7650 static tree
7651 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7652 {
7653 tree etype, elem;
7654 int i, size, count;
7655 tree *elements;
7656
7657 etype = TREE_TYPE (type);
7658 size = GET_MODE_SIZE (TYPE_MODE (etype));
7659 count = TYPE_VECTOR_SUBPARTS (type);
7660 if (size * count > len)
7661 return NULL_TREE;
7662
7663 elements = XALLOCAVEC (tree, count);
7664 for (i = count - 1; i >= 0; i--)
7665 {
7666 elem = native_interpret_expr (etype, ptr+(i*size), size);
7667 if (!elem)
7668 return NULL_TREE;
7669 elements[i] = elem;
7670 }
7671 return build_vector (type, elements);
7672 }
7673
7674
7675 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7676 the buffer PTR of length LEN as a constant of type TYPE. For
7677 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7678 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7679 return NULL_TREE. */
7680
7681 tree
7682 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7683 {
7684 switch (TREE_CODE (type))
7685 {
7686 case INTEGER_TYPE:
7687 case ENUMERAL_TYPE:
7688 case BOOLEAN_TYPE:
7689 case POINTER_TYPE:
7690 case REFERENCE_TYPE:
7691 return native_interpret_int (type, ptr, len);
7692
7693 case REAL_TYPE:
7694 return native_interpret_real (type, ptr, len);
7695
7696 case FIXED_POINT_TYPE:
7697 return native_interpret_fixed (type, ptr, len);
7698
7699 case COMPLEX_TYPE:
7700 return native_interpret_complex (type, ptr, len);
7701
7702 case VECTOR_TYPE:
7703 return native_interpret_vector (type, ptr, len);
7704
7705 default:
7706 return NULL_TREE;
7707 }
7708 }
7709
7710 /* Returns true if we can interpret the contents of a native encoding
7711 as TYPE. */
7712
7713 static bool
7714 can_native_interpret_type_p (tree type)
7715 {
7716 switch (TREE_CODE (type))
7717 {
7718 case INTEGER_TYPE:
7719 case ENUMERAL_TYPE:
7720 case BOOLEAN_TYPE:
7721 case POINTER_TYPE:
7722 case REFERENCE_TYPE:
7723 case FIXED_POINT_TYPE:
7724 case REAL_TYPE:
7725 case COMPLEX_TYPE:
7726 case VECTOR_TYPE:
7727 return true;
7728 default:
7729 return false;
7730 }
7731 }
7732
7733 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7734 TYPE at compile-time. If we're unable to perform the conversion
7735 return NULL_TREE. */
7736
7737 static tree
7738 fold_view_convert_expr (tree type, tree expr)
7739 {
7740 /* We support up to 512-bit values (for V8DFmode). */
7741 unsigned char buffer[64];
7742 int len;
7743
7744 /* Check that the host and target are sane. */
7745 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7746 return NULL_TREE;
7747
7748 len = native_encode_expr (expr, buffer, sizeof (buffer));
7749 if (len == 0)
7750 return NULL_TREE;
7751
7752 return native_interpret_expr (type, buffer, len);
7753 }
7754
7755 /* Build an expression for the address of T. Folds away INDIRECT_REF
7756 to avoid confusing the gimplify process. */
7757
7758 tree
7759 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7760 {
7761 /* The size of the object is not relevant when talking about its address. */
7762 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7763 t = TREE_OPERAND (t, 0);
7764
7765 if (TREE_CODE (t) == INDIRECT_REF)
7766 {
7767 t = TREE_OPERAND (t, 0);
7768
7769 if (TREE_TYPE (t) != ptrtype)
7770 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7771 }
7772 else if (TREE_CODE (t) == MEM_REF
7773 && integer_zerop (TREE_OPERAND (t, 1)))
7774 return TREE_OPERAND (t, 0);
7775 else if (TREE_CODE (t) == MEM_REF
7776 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7777 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7778 TREE_OPERAND (t, 0),
7779 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7780 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7781 {
7782 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7783
7784 if (TREE_TYPE (t) != ptrtype)
7785 t = fold_convert_loc (loc, ptrtype, t);
7786 }
7787 else
7788 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7789
7790 return t;
7791 }
7792
7793 /* Build an expression for the address of T. */
7794
7795 tree
7796 build_fold_addr_expr_loc (location_t loc, tree t)
7797 {
7798 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7799
7800 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7801 }
7802
7803 /* Fold a unary expression of code CODE and type TYPE with operand
7804 OP0. Return the folded expression if folding is successful.
7805 Otherwise, return NULL_TREE. */
7806
7807 tree
7808 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7809 {
7810 tree tem;
7811 tree arg0;
7812 enum tree_code_class kind = TREE_CODE_CLASS (code);
7813
7814 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7815 && TREE_CODE_LENGTH (code) == 1);
7816
7817 arg0 = op0;
7818 if (arg0)
7819 {
7820 if (CONVERT_EXPR_CODE_P (code)
7821 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7822 {
7823 /* Don't use STRIP_NOPS, because signedness of argument type
7824 matters. */
7825 STRIP_SIGN_NOPS (arg0);
7826 }
7827 else
7828 {
7829 /* Strip any conversions that don't change the mode. This
7830 is safe for every expression, except for a comparison
7831 expression because its signedness is derived from its
7832 operands.
7833
7834 Note that this is done as an internal manipulation within
7835 the constant folder, in order to find the simplest
7836 representation of the arguments so that their form can be
7837 studied. In any cases, the appropriate type conversions
7838 should be put back in the tree that will get out of the
7839 constant folder. */
7840 STRIP_NOPS (arg0);
7841 }
7842
7843 if (CONSTANT_CLASS_P (arg0))
7844 {
7845 tree tem = const_unop (code, type, arg0);
7846 if (tem)
7847 {
7848 if (TREE_TYPE (tem) != type)
7849 tem = fold_convert_loc (loc, type, tem);
7850 return tem;
7851 }
7852 }
7853 }
7854
7855 tem = generic_simplify (loc, code, type, op0);
7856 if (tem)
7857 return tem;
7858
7859 if (TREE_CODE_CLASS (code) == tcc_unary)
7860 {
7861 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7862 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7863 fold_build1_loc (loc, code, type,
7864 fold_convert_loc (loc, TREE_TYPE (op0),
7865 TREE_OPERAND (arg0, 1))));
7866 else if (TREE_CODE (arg0) == COND_EXPR)
7867 {
7868 tree arg01 = TREE_OPERAND (arg0, 1);
7869 tree arg02 = TREE_OPERAND (arg0, 2);
7870 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7871 arg01 = fold_build1_loc (loc, code, type,
7872 fold_convert_loc (loc,
7873 TREE_TYPE (op0), arg01));
7874 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7875 arg02 = fold_build1_loc (loc, code, type,
7876 fold_convert_loc (loc,
7877 TREE_TYPE (op0), arg02));
7878 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7879 arg01, arg02);
7880
7881 /* If this was a conversion, and all we did was to move into
7882 inside the COND_EXPR, bring it back out. But leave it if
7883 it is a conversion from integer to integer and the
7884 result precision is no wider than a word since such a
7885 conversion is cheap and may be optimized away by combine,
7886 while it couldn't if it were outside the COND_EXPR. Then return
7887 so we don't get into an infinite recursion loop taking the
7888 conversion out and then back in. */
7889
7890 if ((CONVERT_EXPR_CODE_P (code)
7891 || code == NON_LVALUE_EXPR)
7892 && TREE_CODE (tem) == COND_EXPR
7893 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7894 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7895 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7896 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7897 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7898 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7899 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7900 && (INTEGRAL_TYPE_P
7901 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7902 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7903 || flag_syntax_only))
7904 tem = build1_loc (loc, code, type,
7905 build3 (COND_EXPR,
7906 TREE_TYPE (TREE_OPERAND
7907 (TREE_OPERAND (tem, 1), 0)),
7908 TREE_OPERAND (tem, 0),
7909 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7910 TREE_OPERAND (TREE_OPERAND (tem, 2),
7911 0)));
7912 return tem;
7913 }
7914 }
7915
7916 switch (code)
7917 {
7918 case NON_LVALUE_EXPR:
7919 if (!maybe_lvalue_p (op0))
7920 return fold_convert_loc (loc, type, op0);
7921 return NULL_TREE;
7922
7923 CASE_CONVERT:
7924 case FLOAT_EXPR:
7925 case FIX_TRUNC_EXPR:
7926 if (COMPARISON_CLASS_P (op0))
7927 {
7928 /* If we have (type) (a CMP b) and type is an integral type, return
7929 new expression involving the new type. Canonicalize
7930 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7931 non-integral type.
7932 Do not fold the result as that would not simplify further, also
7933 folding again results in recursions. */
7934 if (TREE_CODE (type) == BOOLEAN_TYPE)
7935 return build2_loc (loc, TREE_CODE (op0), type,
7936 TREE_OPERAND (op0, 0),
7937 TREE_OPERAND (op0, 1));
7938 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7939 && TREE_CODE (type) != VECTOR_TYPE)
7940 return build3_loc (loc, COND_EXPR, type, op0,
7941 constant_boolean_node (true, type),
7942 constant_boolean_node (false, type));
7943 }
7944
7945 /* Handle (T *)&A.B.C for A being of type T and B and C
7946 living at offset zero. This occurs frequently in
7947 C++ upcasting and then accessing the base. */
7948 if (TREE_CODE (op0) == ADDR_EXPR
7949 && POINTER_TYPE_P (type)
7950 && handled_component_p (TREE_OPERAND (op0, 0)))
7951 {
7952 HOST_WIDE_INT bitsize, bitpos;
7953 tree offset;
7954 machine_mode mode;
7955 int unsignedp, volatilep;
7956 tree base = TREE_OPERAND (op0, 0);
7957 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7958 &mode, &unsignedp, &volatilep, false);
7959 /* If the reference was to a (constant) zero offset, we can use
7960 the address of the base if it has the same base type
7961 as the result type and the pointer type is unqualified. */
7962 if (! offset && bitpos == 0
7963 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7964 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7965 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7966 return fold_convert_loc (loc, type,
7967 build_fold_addr_expr_loc (loc, base));
7968 }
7969
7970 if (TREE_CODE (op0) == MODIFY_EXPR
7971 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7972 /* Detect assigning a bitfield. */
7973 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7974 && DECL_BIT_FIELD
7975 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7976 {
7977 /* Don't leave an assignment inside a conversion
7978 unless assigning a bitfield. */
7979 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7980 /* First do the assignment, then return converted constant. */
7981 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7982 TREE_NO_WARNING (tem) = 1;
7983 TREE_USED (tem) = 1;
7984 return tem;
7985 }
7986
7987 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7988 constants (if x has signed type, the sign bit cannot be set
7989 in c). This folds extension into the BIT_AND_EXPR.
7990 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7991 very likely don't have maximal range for their precision and this
7992 transformation effectively doesn't preserve non-maximal ranges. */
7993 if (TREE_CODE (type) == INTEGER_TYPE
7994 && TREE_CODE (op0) == BIT_AND_EXPR
7995 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7996 {
7997 tree and_expr = op0;
7998 tree and0 = TREE_OPERAND (and_expr, 0);
7999 tree and1 = TREE_OPERAND (and_expr, 1);
8000 int change = 0;
8001
8002 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8003 || (TYPE_PRECISION (type)
8004 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8005 change = 1;
8006 else if (TYPE_PRECISION (TREE_TYPE (and1))
8007 <= HOST_BITS_PER_WIDE_INT
8008 && tree_fits_uhwi_p (and1))
8009 {
8010 unsigned HOST_WIDE_INT cst;
8011
8012 cst = tree_to_uhwi (and1);
8013 cst &= HOST_WIDE_INT_M1U
8014 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8015 change = (cst == 0);
8016 #ifdef LOAD_EXTEND_OP
8017 if (change
8018 && !flag_syntax_only
8019 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8020 == ZERO_EXTEND))
8021 {
8022 tree uns = unsigned_type_for (TREE_TYPE (and0));
8023 and0 = fold_convert_loc (loc, uns, and0);
8024 and1 = fold_convert_loc (loc, uns, and1);
8025 }
8026 #endif
8027 }
8028 if (change)
8029 {
8030 tem = force_fit_type (type, wi::to_widest (and1), 0,
8031 TREE_OVERFLOW (and1));
8032 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8033 fold_convert_loc (loc, type, and0), tem);
8034 }
8035 }
8036
8037 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8038 when one of the new casts will fold away. Conservatively we assume
8039 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8040 if (POINTER_TYPE_P (type)
8041 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8042 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8043 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8044 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8045 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8046 {
8047 tree arg00 = TREE_OPERAND (arg0, 0);
8048 tree arg01 = TREE_OPERAND (arg0, 1);
8049
8050 return fold_build_pointer_plus_loc
8051 (loc, fold_convert_loc (loc, type, arg00), arg01);
8052 }
8053
8054 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8055 of the same precision, and X is an integer type not narrower than
8056 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8057 if (INTEGRAL_TYPE_P (type)
8058 && TREE_CODE (op0) == BIT_NOT_EXPR
8059 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8060 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8061 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8062 {
8063 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8064 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8065 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8066 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8067 fold_convert_loc (loc, type, tem));
8068 }
8069
8070 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8071 type of X and Y (integer types only). */
8072 if (INTEGRAL_TYPE_P (type)
8073 && TREE_CODE (op0) == MULT_EXPR
8074 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8075 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8076 {
8077 /* Be careful not to introduce new overflows. */
8078 tree mult_type;
8079 if (TYPE_OVERFLOW_WRAPS (type))
8080 mult_type = type;
8081 else
8082 mult_type = unsigned_type_for (type);
8083
8084 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8085 {
8086 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8087 fold_convert_loc (loc, mult_type,
8088 TREE_OPERAND (op0, 0)),
8089 fold_convert_loc (loc, mult_type,
8090 TREE_OPERAND (op0, 1)));
8091 return fold_convert_loc (loc, type, tem);
8092 }
8093 }
8094
8095 return NULL_TREE;
8096
8097 case VIEW_CONVERT_EXPR:
8098 if (TREE_CODE (op0) == MEM_REF)
8099 return fold_build2_loc (loc, MEM_REF, type,
8100 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8101
8102 return NULL_TREE;
8103
8104 case NEGATE_EXPR:
8105 tem = fold_negate_expr (loc, arg0);
8106 if (tem)
8107 return fold_convert_loc (loc, type, tem);
8108 return NULL_TREE;
8109
8110 case ABS_EXPR:
8111 /* Convert fabs((double)float) into (double)fabsf(float). */
8112 if (TREE_CODE (arg0) == NOP_EXPR
8113 && TREE_CODE (type) == REAL_TYPE)
8114 {
8115 tree targ0 = strip_float_extensions (arg0);
8116 if (targ0 != arg0)
8117 return fold_convert_loc (loc, type,
8118 fold_build1_loc (loc, ABS_EXPR,
8119 TREE_TYPE (targ0),
8120 targ0));
8121 }
8122 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8123 else if (TREE_CODE (arg0) == ABS_EXPR)
8124 return arg0;
8125
8126 /* Strip sign ops from argument. */
8127 if (TREE_CODE (type) == REAL_TYPE)
8128 {
8129 tem = fold_strip_sign_ops (arg0);
8130 if (tem)
8131 return fold_build1_loc (loc, ABS_EXPR, type,
8132 fold_convert_loc (loc, type, tem));
8133 }
8134 return NULL_TREE;
8135
8136 case CONJ_EXPR:
8137 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8138 return fold_convert_loc (loc, type, arg0);
8139 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8140 {
8141 tree itype = TREE_TYPE (type);
8142 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8143 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8144 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8145 negate_expr (ipart));
8146 }
8147 if (TREE_CODE (arg0) == CONJ_EXPR)
8148 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8149 return NULL_TREE;
8150
8151 case BIT_NOT_EXPR:
8152 /* Convert ~ (-A) to A - 1. */
8153 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8154 return fold_build2_loc (loc, MINUS_EXPR, type,
8155 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8156 build_int_cst (type, 1));
8157 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8158 else if (INTEGRAL_TYPE_P (type)
8159 && ((TREE_CODE (arg0) == MINUS_EXPR
8160 && integer_onep (TREE_OPERAND (arg0, 1)))
8161 || (TREE_CODE (arg0) == PLUS_EXPR
8162 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8163 {
8164 /* Perform the negation in ARG0's type and only then convert
8165 to TYPE as to avoid introducing undefined behavior. */
8166 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8167 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8168 TREE_OPERAND (arg0, 0));
8169 return fold_convert_loc (loc, type, t);
8170 }
8171 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8172 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8173 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8174 fold_convert_loc (loc, type,
8175 TREE_OPERAND (arg0, 0)))))
8176 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8177 fold_convert_loc (loc, type,
8178 TREE_OPERAND (arg0, 1)));
8179 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8180 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8181 fold_convert_loc (loc, type,
8182 TREE_OPERAND (arg0, 1)))))
8183 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8184 fold_convert_loc (loc, type,
8185 TREE_OPERAND (arg0, 0)), tem);
8186
8187 return NULL_TREE;
8188
8189 case TRUTH_NOT_EXPR:
8190 /* Note that the operand of this must be an int
8191 and its values must be 0 or 1.
8192 ("true" is a fixed value perhaps depending on the language,
8193 but we don't handle values other than 1 correctly yet.) */
8194 tem = fold_truth_not_expr (loc, arg0);
8195 if (!tem)
8196 return NULL_TREE;
8197 return fold_convert_loc (loc, type, tem);
8198
8199 case REALPART_EXPR:
8200 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8201 return fold_convert_loc (loc, type, arg0);
8202 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8203 {
8204 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8205 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8206 fold_build1_loc (loc, REALPART_EXPR, itype,
8207 TREE_OPERAND (arg0, 0)),
8208 fold_build1_loc (loc, REALPART_EXPR, itype,
8209 TREE_OPERAND (arg0, 1)));
8210 return fold_convert_loc (loc, type, tem);
8211 }
8212 if (TREE_CODE (arg0) == CONJ_EXPR)
8213 {
8214 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8215 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8216 TREE_OPERAND (arg0, 0));
8217 return fold_convert_loc (loc, type, tem);
8218 }
8219 if (TREE_CODE (arg0) == CALL_EXPR)
8220 {
8221 tree fn = get_callee_fndecl (arg0);
8222 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8223 switch (DECL_FUNCTION_CODE (fn))
8224 {
8225 CASE_FLT_FN (BUILT_IN_CEXPI):
8226 fn = mathfn_built_in (type, BUILT_IN_COS);
8227 if (fn)
8228 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8229 break;
8230
8231 default:
8232 break;
8233 }
8234 }
8235 return NULL_TREE;
8236
8237 case IMAGPART_EXPR:
8238 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8239 return build_zero_cst (type);
8240 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8241 {
8242 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8243 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8244 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8245 TREE_OPERAND (arg0, 0)),
8246 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8247 TREE_OPERAND (arg0, 1)));
8248 return fold_convert_loc (loc, type, tem);
8249 }
8250 if (TREE_CODE (arg0) == CONJ_EXPR)
8251 {
8252 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8253 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8254 return fold_convert_loc (loc, type, negate_expr (tem));
8255 }
8256 if (TREE_CODE (arg0) == CALL_EXPR)
8257 {
8258 tree fn = get_callee_fndecl (arg0);
8259 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8260 switch (DECL_FUNCTION_CODE (fn))
8261 {
8262 CASE_FLT_FN (BUILT_IN_CEXPI):
8263 fn = mathfn_built_in (type, BUILT_IN_SIN);
8264 if (fn)
8265 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8266 break;
8267
8268 default:
8269 break;
8270 }
8271 }
8272 return NULL_TREE;
8273
8274 case INDIRECT_REF:
8275 /* Fold *&X to X if X is an lvalue. */
8276 if (TREE_CODE (op0) == ADDR_EXPR)
8277 {
8278 tree op00 = TREE_OPERAND (op0, 0);
8279 if ((TREE_CODE (op00) == VAR_DECL
8280 || TREE_CODE (op00) == PARM_DECL
8281 || TREE_CODE (op00) == RESULT_DECL)
8282 && !TREE_READONLY (op00))
8283 return op00;
8284 }
8285 return NULL_TREE;
8286
8287 default:
8288 return NULL_TREE;
8289 } /* switch (code) */
8290 }
8291
8292
8293 /* If the operation was a conversion do _not_ mark a resulting constant
8294 with TREE_OVERFLOW if the original constant was not. These conversions
8295 have implementation defined behavior and retaining the TREE_OVERFLOW
8296 flag here would confuse later passes such as VRP. */
8297 tree
8298 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8299 tree type, tree op0)
8300 {
8301 tree res = fold_unary_loc (loc, code, type, op0);
8302 if (res
8303 && TREE_CODE (res) == INTEGER_CST
8304 && TREE_CODE (op0) == INTEGER_CST
8305 && CONVERT_EXPR_CODE_P (code))
8306 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8307
8308 return res;
8309 }
8310
8311 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8312 operands OP0 and OP1. LOC is the location of the resulting expression.
8313 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8314 Return the folded expression if folding is successful. Otherwise,
8315 return NULL_TREE. */
8316 static tree
8317 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8318 tree arg0, tree arg1, tree op0, tree op1)
8319 {
8320 tree tem;
8321
8322 /* We only do these simplifications if we are optimizing. */
8323 if (!optimize)
8324 return NULL_TREE;
8325
8326 /* Check for things like (A || B) && (A || C). We can convert this
8327 to A || (B && C). Note that either operator can be any of the four
8328 truth and/or operations and the transformation will still be
8329 valid. Also note that we only care about order for the
8330 ANDIF and ORIF operators. If B contains side effects, this
8331 might change the truth-value of A. */
8332 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8333 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8334 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8335 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8336 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8337 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8338 {
8339 tree a00 = TREE_OPERAND (arg0, 0);
8340 tree a01 = TREE_OPERAND (arg0, 1);
8341 tree a10 = TREE_OPERAND (arg1, 0);
8342 tree a11 = TREE_OPERAND (arg1, 1);
8343 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8344 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8345 && (code == TRUTH_AND_EXPR
8346 || code == TRUTH_OR_EXPR));
8347
8348 if (operand_equal_p (a00, a10, 0))
8349 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8350 fold_build2_loc (loc, code, type, a01, a11));
8351 else if (commutative && operand_equal_p (a00, a11, 0))
8352 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8353 fold_build2_loc (loc, code, type, a01, a10));
8354 else if (commutative && operand_equal_p (a01, a10, 0))
8355 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8356 fold_build2_loc (loc, code, type, a00, a11));
8357
8358 /* This case if tricky because we must either have commutative
8359 operators or else A10 must not have side-effects. */
8360
8361 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8362 && operand_equal_p (a01, a11, 0))
8363 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8364 fold_build2_loc (loc, code, type, a00, a10),
8365 a01);
8366 }
8367
8368 /* See if we can build a range comparison. */
8369 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8370 return tem;
8371
8372 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8373 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8374 {
8375 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8376 if (tem)
8377 return fold_build2_loc (loc, code, type, tem, arg1);
8378 }
8379
8380 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8381 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8382 {
8383 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8384 if (tem)
8385 return fold_build2_loc (loc, code, type, arg0, tem);
8386 }
8387
8388 /* Check for the possibility of merging component references. If our
8389 lhs is another similar operation, try to merge its rhs with our
8390 rhs. Then try to merge our lhs and rhs. */
8391 if (TREE_CODE (arg0) == code
8392 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8393 TREE_OPERAND (arg0, 1), arg1)))
8394 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8395
8396 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8397 return tem;
8398
8399 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8400 && (code == TRUTH_AND_EXPR
8401 || code == TRUTH_ANDIF_EXPR
8402 || code == TRUTH_OR_EXPR
8403 || code == TRUTH_ORIF_EXPR))
8404 {
8405 enum tree_code ncode, icode;
8406
8407 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8408 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8409 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8410
8411 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8412 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8413 We don't want to pack more than two leafs to a non-IF AND/OR
8414 expression.
8415 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8416 equal to IF-CODE, then we don't want to add right-hand operand.
8417 If the inner right-hand side of left-hand operand has
8418 side-effects, or isn't simple, then we can't add to it,
8419 as otherwise we might destroy if-sequence. */
8420 if (TREE_CODE (arg0) == icode
8421 && simple_operand_p_2 (arg1)
8422 /* Needed for sequence points to handle trappings, and
8423 side-effects. */
8424 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8425 {
8426 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8427 arg1);
8428 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8429 tem);
8430 }
8431 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8432 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8433 else if (TREE_CODE (arg1) == icode
8434 && simple_operand_p_2 (arg0)
8435 /* Needed for sequence points to handle trappings, and
8436 side-effects. */
8437 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8438 {
8439 tem = fold_build2_loc (loc, ncode, type,
8440 arg0, TREE_OPERAND (arg1, 0));
8441 return fold_build2_loc (loc, icode, type, tem,
8442 TREE_OPERAND (arg1, 1));
8443 }
8444 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8445 into (A OR B).
8446 For sequence point consistancy, we need to check for trapping,
8447 and side-effects. */
8448 else if (code == icode && simple_operand_p_2 (arg0)
8449 && simple_operand_p_2 (arg1))
8450 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8451 }
8452
8453 return NULL_TREE;
8454 }
8455
8456 /* Fold a binary expression of code CODE and type TYPE with operands
8457 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8458 Return the folded expression if folding is successful. Otherwise,
8459 return NULL_TREE. */
8460
8461 static tree
8462 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8463 {
8464 enum tree_code compl_code;
8465
8466 if (code == MIN_EXPR)
8467 compl_code = MAX_EXPR;
8468 else if (code == MAX_EXPR)
8469 compl_code = MIN_EXPR;
8470 else
8471 gcc_unreachable ();
8472
8473 /* MIN (MAX (a, b), b) == b. */
8474 if (TREE_CODE (op0) == compl_code
8475 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8476 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8477
8478 /* MIN (MAX (b, a), b) == b. */
8479 if (TREE_CODE (op0) == compl_code
8480 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8481 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8482 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8483
8484 /* MIN (a, MAX (a, b)) == a. */
8485 if (TREE_CODE (op1) == compl_code
8486 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8487 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8488 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8489
8490 /* MIN (a, MAX (b, a)) == a. */
8491 if (TREE_CODE (op1) == compl_code
8492 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8493 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8494 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8495
8496 return NULL_TREE;
8497 }
8498
8499 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8500 by changing CODE to reduce the magnitude of constants involved in
8501 ARG0 of the comparison.
8502 Returns a canonicalized comparison tree if a simplification was
8503 possible, otherwise returns NULL_TREE.
8504 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8505 valid if signed overflow is undefined. */
8506
8507 static tree
8508 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8509 tree arg0, tree arg1,
8510 bool *strict_overflow_p)
8511 {
8512 enum tree_code code0 = TREE_CODE (arg0);
8513 tree t, cst0 = NULL_TREE;
8514 int sgn0;
8515 bool swap = false;
8516
8517 /* Match A +- CST code arg1 and CST code arg1. We can change the
8518 first form only if overflow is undefined. */
8519 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8520 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8521 /* In principle pointers also have undefined overflow behavior,
8522 but that causes problems elsewhere. */
8523 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8524 && (code0 == MINUS_EXPR
8525 || code0 == PLUS_EXPR)
8526 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8527 || code0 == INTEGER_CST))
8528 return NULL_TREE;
8529
8530 /* Identify the constant in arg0 and its sign. */
8531 if (code0 == INTEGER_CST)
8532 cst0 = arg0;
8533 else
8534 cst0 = TREE_OPERAND (arg0, 1);
8535 sgn0 = tree_int_cst_sgn (cst0);
8536
8537 /* Overflowed constants and zero will cause problems. */
8538 if (integer_zerop (cst0)
8539 || TREE_OVERFLOW (cst0))
8540 return NULL_TREE;
8541
8542 /* See if we can reduce the magnitude of the constant in
8543 arg0 by changing the comparison code. */
8544 if (code0 == INTEGER_CST)
8545 {
8546 /* CST <= arg1 -> CST-1 < arg1. */
8547 if (code == LE_EXPR && sgn0 == 1)
8548 code = LT_EXPR;
8549 /* -CST < arg1 -> -CST-1 <= arg1. */
8550 else if (code == LT_EXPR && sgn0 == -1)
8551 code = LE_EXPR;
8552 /* CST > arg1 -> CST-1 >= arg1. */
8553 else if (code == GT_EXPR && sgn0 == 1)
8554 code = GE_EXPR;
8555 /* -CST >= arg1 -> -CST-1 > arg1. */
8556 else if (code == GE_EXPR && sgn0 == -1)
8557 code = GT_EXPR;
8558 else
8559 return NULL_TREE;
8560 /* arg1 code' CST' might be more canonical. */
8561 swap = true;
8562 }
8563 else
8564 {
8565 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8566 if (code == LT_EXPR
8567 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8568 code = LE_EXPR;
8569 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8570 else if (code == GT_EXPR
8571 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8572 code = GE_EXPR;
8573 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8574 else if (code == LE_EXPR
8575 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8576 code = LT_EXPR;
8577 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8578 else if (code == GE_EXPR
8579 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8580 code = GT_EXPR;
8581 else
8582 return NULL_TREE;
8583 *strict_overflow_p = true;
8584 }
8585
8586 /* Now build the constant reduced in magnitude. But not if that
8587 would produce one outside of its types range. */
8588 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8589 && ((sgn0 == 1
8590 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8591 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8592 || (sgn0 == -1
8593 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8594 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8595 /* We cannot swap the comparison here as that would cause us to
8596 endlessly recurse. */
8597 return NULL_TREE;
8598
8599 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8600 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8601 if (code0 != INTEGER_CST)
8602 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8603 t = fold_convert (TREE_TYPE (arg1), t);
8604
8605 /* If swapping might yield to a more canonical form, do so. */
8606 if (swap)
8607 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8608 else
8609 return fold_build2_loc (loc, code, type, t, arg1);
8610 }
8611
8612 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8613 overflow further. Try to decrease the magnitude of constants involved
8614 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8615 and put sole constants at the second argument position.
8616 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8617
8618 static tree
8619 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8620 tree arg0, tree arg1)
8621 {
8622 tree t;
8623 bool strict_overflow_p;
8624 const char * const warnmsg = G_("assuming signed overflow does not occur "
8625 "when reducing constant in comparison");
8626
8627 /* Try canonicalization by simplifying arg0. */
8628 strict_overflow_p = false;
8629 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8630 &strict_overflow_p);
8631 if (t)
8632 {
8633 if (strict_overflow_p)
8634 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8635 return t;
8636 }
8637
8638 /* Try canonicalization by simplifying arg1 using the swapped
8639 comparison. */
8640 code = swap_tree_comparison (code);
8641 strict_overflow_p = false;
8642 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8643 &strict_overflow_p);
8644 if (t && strict_overflow_p)
8645 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8646 return t;
8647 }
8648
8649 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8650 space. This is used to avoid issuing overflow warnings for
8651 expressions like &p->x which can not wrap. */
8652
8653 static bool
8654 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8655 {
8656 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8657 return true;
8658
8659 if (bitpos < 0)
8660 return true;
8661
8662 wide_int wi_offset;
8663 int precision = TYPE_PRECISION (TREE_TYPE (base));
8664 if (offset == NULL_TREE)
8665 wi_offset = wi::zero (precision);
8666 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8667 return true;
8668 else
8669 wi_offset = offset;
8670
8671 bool overflow;
8672 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8673 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8674 if (overflow)
8675 return true;
8676
8677 if (!wi::fits_uhwi_p (total))
8678 return true;
8679
8680 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8681 if (size <= 0)
8682 return true;
8683
8684 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8685 array. */
8686 if (TREE_CODE (base) == ADDR_EXPR)
8687 {
8688 HOST_WIDE_INT base_size;
8689
8690 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8691 if (base_size > 0 && size < base_size)
8692 size = base_size;
8693 }
8694
8695 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8696 }
8697
8698 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8699 kind INTEGER_CST. This makes sure to properly sign-extend the
8700 constant. */
8701
8702 static HOST_WIDE_INT
8703 size_low_cst (const_tree t)
8704 {
8705 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8706 int prec = TYPE_PRECISION (TREE_TYPE (t));
8707 if (prec < HOST_BITS_PER_WIDE_INT)
8708 return sext_hwi (w, prec);
8709 return w;
8710 }
8711
8712 /* Subroutine of fold_binary. This routine performs all of the
8713 transformations that are common to the equality/inequality
8714 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8715 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8716 fold_binary should call fold_binary. Fold a comparison with
8717 tree code CODE and type TYPE with operands OP0 and OP1. Return
8718 the folded comparison or NULL_TREE. */
8719
8720 static tree
8721 fold_comparison (location_t loc, enum tree_code code, tree type,
8722 tree op0, tree op1)
8723 {
8724 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8725 tree arg0, arg1, tem;
8726
8727 arg0 = op0;
8728 arg1 = op1;
8729
8730 STRIP_SIGN_NOPS (arg0);
8731 STRIP_SIGN_NOPS (arg1);
8732
8733 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8734 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8735 && (equality_code
8736 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8737 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8738 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8739 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8740 && TREE_CODE (arg1) == INTEGER_CST
8741 && !TREE_OVERFLOW (arg1))
8742 {
8743 const enum tree_code
8744 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8745 tree const1 = TREE_OPERAND (arg0, 1);
8746 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8747 tree variable = TREE_OPERAND (arg0, 0);
8748 tree new_const = int_const_binop (reverse_op, const2, const1);
8749
8750 /* If the constant operation overflowed this can be
8751 simplified as a comparison against INT_MAX/INT_MIN. */
8752 if (TREE_OVERFLOW (new_const)
8753 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8754 {
8755 int const1_sgn = tree_int_cst_sgn (const1);
8756 enum tree_code code2 = code;
8757
8758 /* Get the sign of the constant on the lhs if the
8759 operation were VARIABLE + CONST1. */
8760 if (TREE_CODE (arg0) == MINUS_EXPR)
8761 const1_sgn = -const1_sgn;
8762
8763 /* The sign of the constant determines if we overflowed
8764 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8765 Canonicalize to the INT_MIN overflow by swapping the comparison
8766 if necessary. */
8767 if (const1_sgn == -1)
8768 code2 = swap_tree_comparison (code);
8769
8770 /* We now can look at the canonicalized case
8771 VARIABLE + 1 CODE2 INT_MIN
8772 and decide on the result. */
8773 switch (code2)
8774 {
8775 case EQ_EXPR:
8776 case LT_EXPR:
8777 case LE_EXPR:
8778 return
8779 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8780
8781 case NE_EXPR:
8782 case GE_EXPR:
8783 case GT_EXPR:
8784 return
8785 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8786
8787 default:
8788 gcc_unreachable ();
8789 }
8790 }
8791 else
8792 {
8793 if (!equality_code)
8794 fold_overflow_warning ("assuming signed overflow does not occur "
8795 "when changing X +- C1 cmp C2 to "
8796 "X cmp C2 -+ C1",
8797 WARN_STRICT_OVERFLOW_COMPARISON);
8798 return fold_build2_loc (loc, code, type, variable, new_const);
8799 }
8800 }
8801
8802 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8803 if (TREE_CODE (arg0) == MINUS_EXPR
8804 && equality_code
8805 && integer_zerop (arg1))
8806 {
8807 /* ??? The transformation is valid for the other operators if overflow
8808 is undefined for the type, but performing it here badly interacts
8809 with the transformation in fold_cond_expr_with_comparison which
8810 attempts to synthetize ABS_EXPR. */
8811 if (!equality_code)
8812 fold_overflow_warning ("assuming signed overflow does not occur "
8813 "when changing X - Y cmp 0 to X cmp Y",
8814 WARN_STRICT_OVERFLOW_COMPARISON);
8815 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8816 TREE_OPERAND (arg0, 1));
8817 }
8818
8819 /* For comparisons of pointers we can decompose it to a compile time
8820 comparison of the base objects and the offsets into the object.
8821 This requires at least one operand being an ADDR_EXPR or a
8822 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8823 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8824 && (TREE_CODE (arg0) == ADDR_EXPR
8825 || TREE_CODE (arg1) == ADDR_EXPR
8826 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8827 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8828 {
8829 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8830 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8831 machine_mode mode;
8832 int volatilep, unsignedp;
8833 bool indirect_base0 = false, indirect_base1 = false;
8834
8835 /* Get base and offset for the access. Strip ADDR_EXPR for
8836 get_inner_reference, but put it back by stripping INDIRECT_REF
8837 off the base object if possible. indirect_baseN will be true
8838 if baseN is not an address but refers to the object itself. */
8839 base0 = arg0;
8840 if (TREE_CODE (arg0) == ADDR_EXPR)
8841 {
8842 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8843 &bitsize, &bitpos0, &offset0, &mode,
8844 &unsignedp, &volatilep, false);
8845 if (TREE_CODE (base0) == INDIRECT_REF)
8846 base0 = TREE_OPERAND (base0, 0);
8847 else
8848 indirect_base0 = true;
8849 }
8850 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8851 {
8852 base0 = TREE_OPERAND (arg0, 0);
8853 STRIP_SIGN_NOPS (base0);
8854 if (TREE_CODE (base0) == ADDR_EXPR)
8855 {
8856 base0 = TREE_OPERAND (base0, 0);
8857 indirect_base0 = true;
8858 }
8859 offset0 = TREE_OPERAND (arg0, 1);
8860 if (tree_fits_shwi_p (offset0))
8861 {
8862 HOST_WIDE_INT off = size_low_cst (offset0);
8863 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8864 * BITS_PER_UNIT)
8865 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8866 {
8867 bitpos0 = off * BITS_PER_UNIT;
8868 offset0 = NULL_TREE;
8869 }
8870 }
8871 }
8872
8873 base1 = arg1;
8874 if (TREE_CODE (arg1) == ADDR_EXPR)
8875 {
8876 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8877 &bitsize, &bitpos1, &offset1, &mode,
8878 &unsignedp, &volatilep, false);
8879 if (TREE_CODE (base1) == INDIRECT_REF)
8880 base1 = TREE_OPERAND (base1, 0);
8881 else
8882 indirect_base1 = true;
8883 }
8884 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8885 {
8886 base1 = TREE_OPERAND (arg1, 0);
8887 STRIP_SIGN_NOPS (base1);
8888 if (TREE_CODE (base1) == ADDR_EXPR)
8889 {
8890 base1 = TREE_OPERAND (base1, 0);
8891 indirect_base1 = true;
8892 }
8893 offset1 = TREE_OPERAND (arg1, 1);
8894 if (tree_fits_shwi_p (offset1))
8895 {
8896 HOST_WIDE_INT off = size_low_cst (offset1);
8897 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8898 * BITS_PER_UNIT)
8899 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8900 {
8901 bitpos1 = off * BITS_PER_UNIT;
8902 offset1 = NULL_TREE;
8903 }
8904 }
8905 }
8906
8907 /* A local variable can never be pointed to by
8908 the default SSA name of an incoming parameter. */
8909 if ((TREE_CODE (arg0) == ADDR_EXPR
8910 && indirect_base0
8911 && TREE_CODE (base0) == VAR_DECL
8912 && auto_var_in_fn_p (base0, current_function_decl)
8913 && !indirect_base1
8914 && TREE_CODE (base1) == SSA_NAME
8915 && SSA_NAME_IS_DEFAULT_DEF (base1)
8916 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8917 || (TREE_CODE (arg1) == ADDR_EXPR
8918 && indirect_base1
8919 && TREE_CODE (base1) == VAR_DECL
8920 && auto_var_in_fn_p (base1, current_function_decl)
8921 && !indirect_base0
8922 && TREE_CODE (base0) == SSA_NAME
8923 && SSA_NAME_IS_DEFAULT_DEF (base0)
8924 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8925 {
8926 if (code == NE_EXPR)
8927 return constant_boolean_node (1, type);
8928 else if (code == EQ_EXPR)
8929 return constant_boolean_node (0, type);
8930 }
8931 /* If we have equivalent bases we might be able to simplify. */
8932 else if (indirect_base0 == indirect_base1
8933 && operand_equal_p (base0, base1, 0))
8934 {
8935 /* We can fold this expression to a constant if the non-constant
8936 offset parts are equal. */
8937 if ((offset0 == offset1
8938 || (offset0 && offset1
8939 && operand_equal_p (offset0, offset1, 0)))
8940 && (code == EQ_EXPR
8941 || code == NE_EXPR
8942 || (indirect_base0 && DECL_P (base0))
8943 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8944
8945 {
8946 if (!equality_code
8947 && bitpos0 != bitpos1
8948 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8949 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8950 fold_overflow_warning (("assuming pointer wraparound does not "
8951 "occur when comparing P +- C1 with "
8952 "P +- C2"),
8953 WARN_STRICT_OVERFLOW_CONDITIONAL);
8954
8955 switch (code)
8956 {
8957 case EQ_EXPR:
8958 return constant_boolean_node (bitpos0 == bitpos1, type);
8959 case NE_EXPR:
8960 return constant_boolean_node (bitpos0 != bitpos1, type);
8961 case LT_EXPR:
8962 return constant_boolean_node (bitpos0 < bitpos1, type);
8963 case LE_EXPR:
8964 return constant_boolean_node (bitpos0 <= bitpos1, type);
8965 case GE_EXPR:
8966 return constant_boolean_node (bitpos0 >= bitpos1, type);
8967 case GT_EXPR:
8968 return constant_boolean_node (bitpos0 > bitpos1, type);
8969 default:;
8970 }
8971 }
8972 /* We can simplify the comparison to a comparison of the variable
8973 offset parts if the constant offset parts are equal.
8974 Be careful to use signed sizetype here because otherwise we
8975 mess with array offsets in the wrong way. This is possible
8976 because pointer arithmetic is restricted to retain within an
8977 object and overflow on pointer differences is undefined as of
8978 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8979 else if (bitpos0 == bitpos1
8980 && (equality_code
8981 || (indirect_base0 && DECL_P (base0))
8982 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8983 {
8984 /* By converting to signed sizetype we cover middle-end pointer
8985 arithmetic which operates on unsigned pointer types of size
8986 type size and ARRAY_REF offsets which are properly sign or
8987 zero extended from their type in case it is narrower than
8988 sizetype. */
8989 if (offset0 == NULL_TREE)
8990 offset0 = build_int_cst (ssizetype, 0);
8991 else
8992 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8993 if (offset1 == NULL_TREE)
8994 offset1 = build_int_cst (ssizetype, 0);
8995 else
8996 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8997
8998 if (!equality_code
8999 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9000 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9001 fold_overflow_warning (("assuming pointer wraparound does not "
9002 "occur when comparing P +- C1 with "
9003 "P +- C2"),
9004 WARN_STRICT_OVERFLOW_COMPARISON);
9005
9006 return fold_build2_loc (loc, code, type, offset0, offset1);
9007 }
9008 }
9009 /* For non-equal bases we can simplify if they are addresses
9010 declarations with different addresses. */
9011 else if (indirect_base0 && indirect_base1
9012 /* We know that !operand_equal_p (base0, base1, 0)
9013 because the if condition was false. But make
9014 sure two decls are not the same. */
9015 && base0 != base1
9016 && TREE_CODE (arg0) == ADDR_EXPR
9017 && TREE_CODE (arg1) == ADDR_EXPR
9018 && DECL_P (base0)
9019 && DECL_P (base1)
9020 /* Watch for aliases. */
9021 && (!decl_in_symtab_p (base0)
9022 || !decl_in_symtab_p (base1)
9023 || !symtab_node::get_create (base0)->equal_address_to
9024 (symtab_node::get_create (base1))))
9025 {
9026 if (code == EQ_EXPR)
9027 return omit_two_operands_loc (loc, type, boolean_false_node,
9028 arg0, arg1);
9029 else if (code == NE_EXPR)
9030 return omit_two_operands_loc (loc, type, boolean_true_node,
9031 arg0, arg1);
9032 }
9033 /* For equal offsets we can simplify to a comparison of the
9034 base addresses. */
9035 else if (bitpos0 == bitpos1
9036 && (indirect_base0
9037 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9038 && (indirect_base1
9039 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9040 && ((offset0 == offset1)
9041 || (offset0 && offset1
9042 && operand_equal_p (offset0, offset1, 0))))
9043 {
9044 if (indirect_base0)
9045 base0 = build_fold_addr_expr_loc (loc, base0);
9046 if (indirect_base1)
9047 base1 = build_fold_addr_expr_loc (loc, base1);
9048 return fold_build2_loc (loc, code, type, base0, base1);
9049 }
9050 }
9051
9052 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9053 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9054 the resulting offset is smaller in absolute value than the
9055 original one and has the same sign. */
9056 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9057 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9058 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9059 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9060 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9061 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9062 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9063 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9064 {
9065 tree const1 = TREE_OPERAND (arg0, 1);
9066 tree const2 = TREE_OPERAND (arg1, 1);
9067 tree variable1 = TREE_OPERAND (arg0, 0);
9068 tree variable2 = TREE_OPERAND (arg1, 0);
9069 tree cst;
9070 const char * const warnmsg = G_("assuming signed overflow does not "
9071 "occur when combining constants around "
9072 "a comparison");
9073
9074 /* Put the constant on the side where it doesn't overflow and is
9075 of lower absolute value and of same sign than before. */
9076 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9077 ? MINUS_EXPR : PLUS_EXPR,
9078 const2, const1);
9079 if (!TREE_OVERFLOW (cst)
9080 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9081 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9082 {
9083 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9084 return fold_build2_loc (loc, code, type,
9085 variable1,
9086 fold_build2_loc (loc, TREE_CODE (arg1),
9087 TREE_TYPE (arg1),
9088 variable2, cst));
9089 }
9090
9091 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9092 ? MINUS_EXPR : PLUS_EXPR,
9093 const1, const2);
9094 if (!TREE_OVERFLOW (cst)
9095 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9096 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9097 {
9098 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9099 return fold_build2_loc (loc, code, type,
9100 fold_build2_loc (loc, TREE_CODE (arg0),
9101 TREE_TYPE (arg0),
9102 variable1, cst),
9103 variable2);
9104 }
9105 }
9106
9107 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9108 signed arithmetic case. That form is created by the compiler
9109 often enough for folding it to be of value. One example is in
9110 computing loop trip counts after Operator Strength Reduction. */
9111 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9112 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9113 && TREE_CODE (arg0) == MULT_EXPR
9114 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9115 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9116 && integer_zerop (arg1))
9117 {
9118 tree const1 = TREE_OPERAND (arg0, 1);
9119 tree const2 = arg1; /* zero */
9120 tree variable1 = TREE_OPERAND (arg0, 0);
9121 enum tree_code cmp_code = code;
9122
9123 /* Handle unfolded multiplication by zero. */
9124 if (integer_zerop (const1))
9125 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9126
9127 fold_overflow_warning (("assuming signed overflow does not occur when "
9128 "eliminating multiplication in comparison "
9129 "with zero"),
9130 WARN_STRICT_OVERFLOW_COMPARISON);
9131
9132 /* If const1 is negative we swap the sense of the comparison. */
9133 if (tree_int_cst_sgn (const1) < 0)
9134 cmp_code = swap_tree_comparison (cmp_code);
9135
9136 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9137 }
9138
9139 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9140 if (tem)
9141 return tem;
9142
9143 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9144 {
9145 tree targ0 = strip_float_extensions (arg0);
9146 tree targ1 = strip_float_extensions (arg1);
9147 tree newtype = TREE_TYPE (targ0);
9148
9149 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9150 newtype = TREE_TYPE (targ1);
9151
9152 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9153 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9154 return fold_build2_loc (loc, code, type,
9155 fold_convert_loc (loc, newtype, targ0),
9156 fold_convert_loc (loc, newtype, targ1));
9157
9158 /* (-a) CMP (-b) -> b CMP a */
9159 if (TREE_CODE (arg0) == NEGATE_EXPR
9160 && TREE_CODE (arg1) == NEGATE_EXPR)
9161 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9162 TREE_OPERAND (arg0, 0));
9163
9164 if (TREE_CODE (arg1) == REAL_CST)
9165 {
9166 REAL_VALUE_TYPE cst;
9167 cst = TREE_REAL_CST (arg1);
9168
9169 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9170 if (TREE_CODE (arg0) == NEGATE_EXPR)
9171 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9172 TREE_OPERAND (arg0, 0),
9173 build_real (TREE_TYPE (arg1),
9174 real_value_negate (&cst)));
9175
9176 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9177 /* a CMP (-0) -> a CMP 0 */
9178 if (REAL_VALUE_MINUS_ZERO (cst))
9179 return fold_build2_loc (loc, code, type, arg0,
9180 build_real (TREE_TYPE (arg1), dconst0));
9181
9182 /* x != NaN is always true, other ops are always false. */
9183 if (REAL_VALUE_ISNAN (cst)
9184 && ! HONOR_SNANS (arg1))
9185 {
9186 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9187 return omit_one_operand_loc (loc, type, tem, arg0);
9188 }
9189
9190 /* Fold comparisons against infinity. */
9191 if (REAL_VALUE_ISINF (cst)
9192 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9193 {
9194 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9195 if (tem != NULL_TREE)
9196 return tem;
9197 }
9198 }
9199
9200 /* If this is a comparison of a real constant with a PLUS_EXPR
9201 or a MINUS_EXPR of a real constant, we can convert it into a
9202 comparison with a revised real constant as long as no overflow
9203 occurs when unsafe_math_optimizations are enabled. */
9204 if (flag_unsafe_math_optimizations
9205 && TREE_CODE (arg1) == REAL_CST
9206 && (TREE_CODE (arg0) == PLUS_EXPR
9207 || TREE_CODE (arg0) == MINUS_EXPR)
9208 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9209 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9210 ? MINUS_EXPR : PLUS_EXPR,
9211 arg1, TREE_OPERAND (arg0, 1)))
9212 && !TREE_OVERFLOW (tem))
9213 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9214
9215 /* Likewise, we can simplify a comparison of a real constant with
9216 a MINUS_EXPR whose first operand is also a real constant, i.e.
9217 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9218 floating-point types only if -fassociative-math is set. */
9219 if (flag_associative_math
9220 && TREE_CODE (arg1) == REAL_CST
9221 && TREE_CODE (arg0) == MINUS_EXPR
9222 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9223 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9224 arg1))
9225 && !TREE_OVERFLOW (tem))
9226 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9227 TREE_OPERAND (arg0, 1), tem);
9228
9229 /* Fold comparisons against built-in math functions. */
9230 if (TREE_CODE (arg1) == REAL_CST
9231 && flag_unsafe_math_optimizations
9232 && ! flag_errno_math)
9233 {
9234 enum built_in_function fcode = builtin_mathfn_code (arg0);
9235
9236 if (fcode != END_BUILTINS)
9237 {
9238 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9239 if (tem != NULL_TREE)
9240 return tem;
9241 }
9242 }
9243 }
9244
9245 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9246 && CONVERT_EXPR_P (arg0))
9247 {
9248 /* If we are widening one operand of an integer comparison,
9249 see if the other operand is similarly being widened. Perhaps we
9250 can do the comparison in the narrower type. */
9251 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9252 if (tem)
9253 return tem;
9254
9255 /* Or if we are changing signedness. */
9256 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9257 if (tem)
9258 return tem;
9259 }
9260
9261 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9262 constant, we can simplify it. */
9263 if (TREE_CODE (arg1) == INTEGER_CST
9264 && (TREE_CODE (arg0) == MIN_EXPR
9265 || TREE_CODE (arg0) == MAX_EXPR)
9266 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9267 {
9268 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9269 if (tem)
9270 return tem;
9271 }
9272
9273 /* Simplify comparison of something with itself. (For IEEE
9274 floating-point, we can only do some of these simplifications.) */
9275 if (operand_equal_p (arg0, arg1, 0))
9276 {
9277 switch (code)
9278 {
9279 case EQ_EXPR:
9280 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9281 || ! HONOR_NANS (arg0))
9282 return constant_boolean_node (1, type);
9283 break;
9284
9285 case GE_EXPR:
9286 case LE_EXPR:
9287 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9288 || ! HONOR_NANS (arg0))
9289 return constant_boolean_node (1, type);
9290 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9291
9292 case NE_EXPR:
9293 /* For NE, we can only do this simplification if integer
9294 or we don't honor IEEE floating point NaNs. */
9295 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9296 && HONOR_NANS (arg0))
9297 break;
9298 /* ... fall through ... */
9299 case GT_EXPR:
9300 case LT_EXPR:
9301 return constant_boolean_node (0, type);
9302 default:
9303 gcc_unreachable ();
9304 }
9305 }
9306
9307 /* If we are comparing an expression that just has comparisons
9308 of two integer values, arithmetic expressions of those comparisons,
9309 and constants, we can simplify it. There are only three cases
9310 to check: the two values can either be equal, the first can be
9311 greater, or the second can be greater. Fold the expression for
9312 those three values. Since each value must be 0 or 1, we have
9313 eight possibilities, each of which corresponds to the constant 0
9314 or 1 or one of the six possible comparisons.
9315
9316 This handles common cases like (a > b) == 0 but also handles
9317 expressions like ((x > y) - (y > x)) > 0, which supposedly
9318 occur in macroized code. */
9319
9320 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9321 {
9322 tree cval1 = 0, cval2 = 0;
9323 int save_p = 0;
9324
9325 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9326 /* Don't handle degenerate cases here; they should already
9327 have been handled anyway. */
9328 && cval1 != 0 && cval2 != 0
9329 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9330 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9331 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9332 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9333 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9334 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9335 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9336 {
9337 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9338 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9339
9340 /* We can't just pass T to eval_subst in case cval1 or cval2
9341 was the same as ARG1. */
9342
9343 tree high_result
9344 = fold_build2_loc (loc, code, type,
9345 eval_subst (loc, arg0, cval1, maxval,
9346 cval2, minval),
9347 arg1);
9348 tree equal_result
9349 = fold_build2_loc (loc, code, type,
9350 eval_subst (loc, arg0, cval1, maxval,
9351 cval2, maxval),
9352 arg1);
9353 tree low_result
9354 = fold_build2_loc (loc, code, type,
9355 eval_subst (loc, arg0, cval1, minval,
9356 cval2, maxval),
9357 arg1);
9358
9359 /* All three of these results should be 0 or 1. Confirm they are.
9360 Then use those values to select the proper code to use. */
9361
9362 if (TREE_CODE (high_result) == INTEGER_CST
9363 && TREE_CODE (equal_result) == INTEGER_CST
9364 && TREE_CODE (low_result) == INTEGER_CST)
9365 {
9366 /* Make a 3-bit mask with the high-order bit being the
9367 value for `>', the next for '=', and the low for '<'. */
9368 switch ((integer_onep (high_result) * 4)
9369 + (integer_onep (equal_result) * 2)
9370 + integer_onep (low_result))
9371 {
9372 case 0:
9373 /* Always false. */
9374 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9375 case 1:
9376 code = LT_EXPR;
9377 break;
9378 case 2:
9379 code = EQ_EXPR;
9380 break;
9381 case 3:
9382 code = LE_EXPR;
9383 break;
9384 case 4:
9385 code = GT_EXPR;
9386 break;
9387 case 5:
9388 code = NE_EXPR;
9389 break;
9390 case 6:
9391 code = GE_EXPR;
9392 break;
9393 case 7:
9394 /* Always true. */
9395 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9396 }
9397
9398 if (save_p)
9399 {
9400 tem = save_expr (build2 (code, type, cval1, cval2));
9401 SET_EXPR_LOCATION (tem, loc);
9402 return tem;
9403 }
9404 return fold_build2_loc (loc, code, type, cval1, cval2);
9405 }
9406 }
9407 }
9408
9409 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9410 into a single range test. */
9411 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9412 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9413 && TREE_CODE (arg1) == INTEGER_CST
9414 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9415 && !integer_zerop (TREE_OPERAND (arg0, 1))
9416 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9417 && !TREE_OVERFLOW (arg1))
9418 {
9419 tem = fold_div_compare (loc, code, type, arg0, arg1);
9420 if (tem != NULL_TREE)
9421 return tem;
9422 }
9423
9424 /* Fold ~X op ~Y as Y op X. */
9425 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9426 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9427 {
9428 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9429 return fold_build2_loc (loc, code, type,
9430 fold_convert_loc (loc, cmp_type,
9431 TREE_OPERAND (arg1, 0)),
9432 TREE_OPERAND (arg0, 0));
9433 }
9434
9435 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9436 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9437 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9438 {
9439 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9440 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9441 TREE_OPERAND (arg0, 0),
9442 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9443 fold_convert_loc (loc, cmp_type, arg1)));
9444 }
9445
9446 return NULL_TREE;
9447 }
9448
9449
9450 /* Subroutine of fold_binary. Optimize complex multiplications of the
9451 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9452 argument EXPR represents the expression "z" of type TYPE. */
9453
9454 static tree
9455 fold_mult_zconjz (location_t loc, tree type, tree expr)
9456 {
9457 tree itype = TREE_TYPE (type);
9458 tree rpart, ipart, tem;
9459
9460 if (TREE_CODE (expr) == COMPLEX_EXPR)
9461 {
9462 rpart = TREE_OPERAND (expr, 0);
9463 ipart = TREE_OPERAND (expr, 1);
9464 }
9465 else if (TREE_CODE (expr) == COMPLEX_CST)
9466 {
9467 rpart = TREE_REALPART (expr);
9468 ipart = TREE_IMAGPART (expr);
9469 }
9470 else
9471 {
9472 expr = save_expr (expr);
9473 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9474 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9475 }
9476
9477 rpart = save_expr (rpart);
9478 ipart = save_expr (ipart);
9479 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9480 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9481 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9482 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9483 build_zero_cst (itype));
9484 }
9485
9486
9487 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9488 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9489 guarantees that P and N have the same least significant log2(M) bits.
9490 N is not otherwise constrained. In particular, N is not normalized to
9491 0 <= N < M as is common. In general, the precise value of P is unknown.
9492 M is chosen as large as possible such that constant N can be determined.
9493
9494 Returns M and sets *RESIDUE to N.
9495
9496 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9497 account. This is not always possible due to PR 35705.
9498 */
9499
9500 static unsigned HOST_WIDE_INT
9501 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9502 bool allow_func_align)
9503 {
9504 enum tree_code code;
9505
9506 *residue = 0;
9507
9508 code = TREE_CODE (expr);
9509 if (code == ADDR_EXPR)
9510 {
9511 unsigned int bitalign;
9512 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9513 *residue /= BITS_PER_UNIT;
9514 return bitalign / BITS_PER_UNIT;
9515 }
9516 else if (code == POINTER_PLUS_EXPR)
9517 {
9518 tree op0, op1;
9519 unsigned HOST_WIDE_INT modulus;
9520 enum tree_code inner_code;
9521
9522 op0 = TREE_OPERAND (expr, 0);
9523 STRIP_NOPS (op0);
9524 modulus = get_pointer_modulus_and_residue (op0, residue,
9525 allow_func_align);
9526
9527 op1 = TREE_OPERAND (expr, 1);
9528 STRIP_NOPS (op1);
9529 inner_code = TREE_CODE (op1);
9530 if (inner_code == INTEGER_CST)
9531 {
9532 *residue += TREE_INT_CST_LOW (op1);
9533 return modulus;
9534 }
9535 else if (inner_code == MULT_EXPR)
9536 {
9537 op1 = TREE_OPERAND (op1, 1);
9538 if (TREE_CODE (op1) == INTEGER_CST)
9539 {
9540 unsigned HOST_WIDE_INT align;
9541
9542 /* Compute the greatest power-of-2 divisor of op1. */
9543 align = TREE_INT_CST_LOW (op1);
9544 align &= -align;
9545
9546 /* If align is non-zero and less than *modulus, replace
9547 *modulus with align., If align is 0, then either op1 is 0
9548 or the greatest power-of-2 divisor of op1 doesn't fit in an
9549 unsigned HOST_WIDE_INT. In either case, no additional
9550 constraint is imposed. */
9551 if (align)
9552 modulus = MIN (modulus, align);
9553
9554 return modulus;
9555 }
9556 }
9557 }
9558
9559 /* If we get here, we were unable to determine anything useful about the
9560 expression. */
9561 return 1;
9562 }
9563
9564 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9565 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9566
9567 static bool
9568 vec_cst_ctor_to_array (tree arg, tree *elts)
9569 {
9570 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9571
9572 if (TREE_CODE (arg) == VECTOR_CST)
9573 {
9574 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9575 elts[i] = VECTOR_CST_ELT (arg, i);
9576 }
9577 else if (TREE_CODE (arg) == CONSTRUCTOR)
9578 {
9579 constructor_elt *elt;
9580
9581 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9582 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9583 return false;
9584 else
9585 elts[i] = elt->value;
9586 }
9587 else
9588 return false;
9589 for (; i < nelts; i++)
9590 elts[i]
9591 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9592 return true;
9593 }
9594
9595 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9596 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9597 NULL_TREE otherwise. */
9598
9599 static tree
9600 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9601 {
9602 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9603 tree *elts;
9604 bool need_ctor = false;
9605
9606 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9607 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9608 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9609 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9610 return NULL_TREE;
9611
9612 elts = XALLOCAVEC (tree, nelts * 3);
9613 if (!vec_cst_ctor_to_array (arg0, elts)
9614 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9615 return NULL_TREE;
9616
9617 for (i = 0; i < nelts; i++)
9618 {
9619 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9620 need_ctor = true;
9621 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9622 }
9623
9624 if (need_ctor)
9625 {
9626 vec<constructor_elt, va_gc> *v;
9627 vec_alloc (v, nelts);
9628 for (i = 0; i < nelts; i++)
9629 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9630 return build_constructor (type, v);
9631 }
9632 else
9633 return build_vector (type, &elts[2 * nelts]);
9634 }
9635
9636 /* Try to fold a pointer difference of type TYPE two address expressions of
9637 array references AREF0 and AREF1 using location LOC. Return a
9638 simplified expression for the difference or NULL_TREE. */
9639
9640 static tree
9641 fold_addr_of_array_ref_difference (location_t loc, tree type,
9642 tree aref0, tree aref1)
9643 {
9644 tree base0 = TREE_OPERAND (aref0, 0);
9645 tree base1 = TREE_OPERAND (aref1, 0);
9646 tree base_offset = build_int_cst (type, 0);
9647
9648 /* If the bases are array references as well, recurse. If the bases
9649 are pointer indirections compute the difference of the pointers.
9650 If the bases are equal, we are set. */
9651 if ((TREE_CODE (base0) == ARRAY_REF
9652 && TREE_CODE (base1) == ARRAY_REF
9653 && (base_offset
9654 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9655 || (INDIRECT_REF_P (base0)
9656 && INDIRECT_REF_P (base1)
9657 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9658 TREE_OPERAND (base0, 0),
9659 TREE_OPERAND (base1, 0))))
9660 || operand_equal_p (base0, base1, 0))
9661 {
9662 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9663 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9664 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9665 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9666 return fold_build2_loc (loc, PLUS_EXPR, type,
9667 base_offset,
9668 fold_build2_loc (loc, MULT_EXPR, type,
9669 diff, esz));
9670 }
9671 return NULL_TREE;
9672 }
9673
9674 /* If the real or vector real constant CST of type TYPE has an exact
9675 inverse, return it, else return NULL. */
9676
9677 tree
9678 exact_inverse (tree type, tree cst)
9679 {
9680 REAL_VALUE_TYPE r;
9681 tree unit_type, *elts;
9682 machine_mode mode;
9683 unsigned vec_nelts, i;
9684
9685 switch (TREE_CODE (cst))
9686 {
9687 case REAL_CST:
9688 r = TREE_REAL_CST (cst);
9689
9690 if (exact_real_inverse (TYPE_MODE (type), &r))
9691 return build_real (type, r);
9692
9693 return NULL_TREE;
9694
9695 case VECTOR_CST:
9696 vec_nelts = VECTOR_CST_NELTS (cst);
9697 elts = XALLOCAVEC (tree, vec_nelts);
9698 unit_type = TREE_TYPE (type);
9699 mode = TYPE_MODE (unit_type);
9700
9701 for (i = 0; i < vec_nelts; i++)
9702 {
9703 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9704 if (!exact_real_inverse (mode, &r))
9705 return NULL_TREE;
9706 elts[i] = build_real (unit_type, r);
9707 }
9708
9709 return build_vector (type, elts);
9710
9711 default:
9712 return NULL_TREE;
9713 }
9714 }
9715
9716 /* Mask out the tz least significant bits of X of type TYPE where
9717 tz is the number of trailing zeroes in Y. */
9718 static wide_int
9719 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9720 {
9721 int tz = wi::ctz (y);
9722 if (tz > 0)
9723 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9724 return x;
9725 }
9726
9727 /* Return true when T is an address and is known to be nonzero.
9728 For floating point we further ensure that T is not denormal.
9729 Similar logic is present in nonzero_address in rtlanal.h.
9730
9731 If the return value is based on the assumption that signed overflow
9732 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9733 change *STRICT_OVERFLOW_P. */
9734
9735 static bool
9736 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9737 {
9738 tree type = TREE_TYPE (t);
9739 enum tree_code code;
9740
9741 /* Doing something useful for floating point would need more work. */
9742 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9743 return false;
9744
9745 code = TREE_CODE (t);
9746 switch (TREE_CODE_CLASS (code))
9747 {
9748 case tcc_unary:
9749 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9750 strict_overflow_p);
9751 case tcc_binary:
9752 case tcc_comparison:
9753 return tree_binary_nonzero_warnv_p (code, type,
9754 TREE_OPERAND (t, 0),
9755 TREE_OPERAND (t, 1),
9756 strict_overflow_p);
9757 case tcc_constant:
9758 case tcc_declaration:
9759 case tcc_reference:
9760 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9761
9762 default:
9763 break;
9764 }
9765
9766 switch (code)
9767 {
9768 case TRUTH_NOT_EXPR:
9769 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9770 strict_overflow_p);
9771
9772 case TRUTH_AND_EXPR:
9773 case TRUTH_OR_EXPR:
9774 case TRUTH_XOR_EXPR:
9775 return tree_binary_nonzero_warnv_p (code, type,
9776 TREE_OPERAND (t, 0),
9777 TREE_OPERAND (t, 1),
9778 strict_overflow_p);
9779
9780 case COND_EXPR:
9781 case CONSTRUCTOR:
9782 case OBJ_TYPE_REF:
9783 case ASSERT_EXPR:
9784 case ADDR_EXPR:
9785 case WITH_SIZE_EXPR:
9786 case SSA_NAME:
9787 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9788
9789 case COMPOUND_EXPR:
9790 case MODIFY_EXPR:
9791 case BIND_EXPR:
9792 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9793 strict_overflow_p);
9794
9795 case SAVE_EXPR:
9796 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9797 strict_overflow_p);
9798
9799 case CALL_EXPR:
9800 {
9801 tree fndecl = get_callee_fndecl (t);
9802 if (!fndecl) return false;
9803 if (flag_delete_null_pointer_checks && !flag_check_new
9804 && DECL_IS_OPERATOR_NEW (fndecl)
9805 && !TREE_NOTHROW (fndecl))
9806 return true;
9807 if (flag_delete_null_pointer_checks
9808 && lookup_attribute ("returns_nonnull",
9809 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9810 return true;
9811 return alloca_call_p (t);
9812 }
9813
9814 default:
9815 break;
9816 }
9817 return false;
9818 }
9819
9820 /* Return true when T is an address and is known to be nonzero.
9821 Handle warnings about undefined signed overflow. */
9822
9823 static bool
9824 tree_expr_nonzero_p (tree t)
9825 {
9826 bool ret, strict_overflow_p;
9827
9828 strict_overflow_p = false;
9829 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9830 if (strict_overflow_p)
9831 fold_overflow_warning (("assuming signed overflow does not occur when "
9832 "determining that expression is always "
9833 "non-zero"),
9834 WARN_STRICT_OVERFLOW_MISC);
9835 return ret;
9836 }
9837
9838 /* Fold a binary expression of code CODE and type TYPE with operands
9839 OP0 and OP1. LOC is the location of the resulting expression.
9840 Return the folded expression if folding is successful. Otherwise,
9841 return NULL_TREE. */
9842
9843 tree
9844 fold_binary_loc (location_t loc,
9845 enum tree_code code, tree type, tree op0, tree op1)
9846 {
9847 enum tree_code_class kind = TREE_CODE_CLASS (code);
9848 tree arg0, arg1, tem;
9849 tree t1 = NULL_TREE;
9850 bool strict_overflow_p;
9851 unsigned int prec;
9852
9853 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9854 && TREE_CODE_LENGTH (code) == 2
9855 && op0 != NULL_TREE
9856 && op1 != NULL_TREE);
9857
9858 arg0 = op0;
9859 arg1 = op1;
9860
9861 /* Strip any conversions that don't change the mode. This is
9862 safe for every expression, except for a comparison expression
9863 because its signedness is derived from its operands. So, in
9864 the latter case, only strip conversions that don't change the
9865 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9866 preserved.
9867
9868 Note that this is done as an internal manipulation within the
9869 constant folder, in order to find the simplest representation
9870 of the arguments so that their form can be studied. In any
9871 cases, the appropriate type conversions should be put back in
9872 the tree that will get out of the constant folder. */
9873
9874 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9875 {
9876 STRIP_SIGN_NOPS (arg0);
9877 STRIP_SIGN_NOPS (arg1);
9878 }
9879 else
9880 {
9881 STRIP_NOPS (arg0);
9882 STRIP_NOPS (arg1);
9883 }
9884
9885 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9886 constant but we can't do arithmetic on them. */
9887 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9888 {
9889 tem = const_binop (code, type, arg0, arg1);
9890 if (tem != NULL_TREE)
9891 {
9892 if (TREE_TYPE (tem) != type)
9893 tem = fold_convert_loc (loc, type, tem);
9894 return tem;
9895 }
9896 }
9897
9898 /* If this is a commutative operation, and ARG0 is a constant, move it
9899 to ARG1 to reduce the number of tests below. */
9900 if (commutative_tree_code (code)
9901 && tree_swap_operands_p (arg0, arg1, true))
9902 return fold_build2_loc (loc, code, type, op1, op0);
9903
9904 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9905 to ARG1 to reduce the number of tests below. */
9906 if (kind == tcc_comparison
9907 && tree_swap_operands_p (arg0, arg1, true))
9908 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9909
9910 tem = generic_simplify (loc, code, type, op0, op1);
9911 if (tem)
9912 return tem;
9913
9914 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9915
9916 First check for cases where an arithmetic operation is applied to a
9917 compound, conditional, or comparison operation. Push the arithmetic
9918 operation inside the compound or conditional to see if any folding
9919 can then be done. Convert comparison to conditional for this purpose.
9920 The also optimizes non-constant cases that used to be done in
9921 expand_expr.
9922
9923 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9924 one of the operands is a comparison and the other is a comparison, a
9925 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9926 code below would make the expression more complex. Change it to a
9927 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9928 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9929
9930 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9931 || code == EQ_EXPR || code == NE_EXPR)
9932 && TREE_CODE (type) != VECTOR_TYPE
9933 && ((truth_value_p (TREE_CODE (arg0))
9934 && (truth_value_p (TREE_CODE (arg1))
9935 || (TREE_CODE (arg1) == BIT_AND_EXPR
9936 && integer_onep (TREE_OPERAND (arg1, 1)))))
9937 || (truth_value_p (TREE_CODE (arg1))
9938 && (truth_value_p (TREE_CODE (arg0))
9939 || (TREE_CODE (arg0) == BIT_AND_EXPR
9940 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9941 {
9942 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9943 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9944 : TRUTH_XOR_EXPR,
9945 boolean_type_node,
9946 fold_convert_loc (loc, boolean_type_node, arg0),
9947 fold_convert_loc (loc, boolean_type_node, arg1));
9948
9949 if (code == EQ_EXPR)
9950 tem = invert_truthvalue_loc (loc, tem);
9951
9952 return fold_convert_loc (loc, type, tem);
9953 }
9954
9955 if (TREE_CODE_CLASS (code) == tcc_binary
9956 || TREE_CODE_CLASS (code) == tcc_comparison)
9957 {
9958 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9959 {
9960 tem = fold_build2_loc (loc, code, type,
9961 fold_convert_loc (loc, TREE_TYPE (op0),
9962 TREE_OPERAND (arg0, 1)), op1);
9963 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9964 tem);
9965 }
9966 if (TREE_CODE (arg1) == COMPOUND_EXPR
9967 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9968 {
9969 tem = fold_build2_loc (loc, code, type, op0,
9970 fold_convert_loc (loc, TREE_TYPE (op1),
9971 TREE_OPERAND (arg1, 1)));
9972 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9973 tem);
9974 }
9975
9976 if (TREE_CODE (arg0) == COND_EXPR
9977 || TREE_CODE (arg0) == VEC_COND_EXPR
9978 || COMPARISON_CLASS_P (arg0))
9979 {
9980 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9981 arg0, arg1,
9982 /*cond_first_p=*/1);
9983 if (tem != NULL_TREE)
9984 return tem;
9985 }
9986
9987 if (TREE_CODE (arg1) == COND_EXPR
9988 || TREE_CODE (arg1) == VEC_COND_EXPR
9989 || COMPARISON_CLASS_P (arg1))
9990 {
9991 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9992 arg1, arg0,
9993 /*cond_first_p=*/0);
9994 if (tem != NULL_TREE)
9995 return tem;
9996 }
9997 }
9998
9999 switch (code)
10000 {
10001 case MEM_REF:
10002 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10003 if (TREE_CODE (arg0) == ADDR_EXPR
10004 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10005 {
10006 tree iref = TREE_OPERAND (arg0, 0);
10007 return fold_build2 (MEM_REF, type,
10008 TREE_OPERAND (iref, 0),
10009 int_const_binop (PLUS_EXPR, arg1,
10010 TREE_OPERAND (iref, 1)));
10011 }
10012
10013 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10014 if (TREE_CODE (arg0) == ADDR_EXPR
10015 && handled_component_p (TREE_OPERAND (arg0, 0)))
10016 {
10017 tree base;
10018 HOST_WIDE_INT coffset;
10019 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10020 &coffset);
10021 if (!base)
10022 return NULL_TREE;
10023 return fold_build2 (MEM_REF, type,
10024 build_fold_addr_expr (base),
10025 int_const_binop (PLUS_EXPR, arg1,
10026 size_int (coffset)));
10027 }
10028
10029 return NULL_TREE;
10030
10031 case POINTER_PLUS_EXPR:
10032 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10033 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10034 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10035 return fold_convert_loc (loc, type,
10036 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10037 fold_convert_loc (loc, sizetype,
10038 arg1),
10039 fold_convert_loc (loc, sizetype,
10040 arg0)));
10041
10042 return NULL_TREE;
10043
10044 case PLUS_EXPR:
10045 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10046 {
10047 /* X + (X / CST) * -CST is X % CST. */
10048 if (TREE_CODE (arg1) == MULT_EXPR
10049 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10050 && operand_equal_p (arg0,
10051 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10052 {
10053 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10054 tree cst1 = TREE_OPERAND (arg1, 1);
10055 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10056 cst1, cst0);
10057 if (sum && integer_zerop (sum))
10058 return fold_convert_loc (loc, type,
10059 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10060 TREE_TYPE (arg0), arg0,
10061 cst0));
10062 }
10063 }
10064
10065 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10066 one. Make sure the type is not saturating and has the signedness of
10067 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10068 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10069 if ((TREE_CODE (arg0) == MULT_EXPR
10070 || TREE_CODE (arg1) == MULT_EXPR)
10071 && !TYPE_SATURATING (type)
10072 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10073 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10074 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10075 {
10076 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10077 if (tem)
10078 return tem;
10079 }
10080
10081 if (! FLOAT_TYPE_P (type))
10082 {
10083 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10084 with a constant, and the two constants have no bits in common,
10085 we should treat this as a BIT_IOR_EXPR since this may produce more
10086 simplifications. */
10087 if (TREE_CODE (arg0) == BIT_AND_EXPR
10088 && TREE_CODE (arg1) == BIT_AND_EXPR
10089 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10090 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10091 && wi::bit_and (TREE_OPERAND (arg0, 1),
10092 TREE_OPERAND (arg1, 1)) == 0)
10093 {
10094 code = BIT_IOR_EXPR;
10095 goto bit_ior;
10096 }
10097
10098 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10099 (plus (plus (mult) (mult)) (foo)) so that we can
10100 take advantage of the factoring cases below. */
10101 if (ANY_INTEGRAL_TYPE_P (type)
10102 && TYPE_OVERFLOW_WRAPS (type)
10103 && (((TREE_CODE (arg0) == PLUS_EXPR
10104 || TREE_CODE (arg0) == MINUS_EXPR)
10105 && TREE_CODE (arg1) == MULT_EXPR)
10106 || ((TREE_CODE (arg1) == PLUS_EXPR
10107 || TREE_CODE (arg1) == MINUS_EXPR)
10108 && TREE_CODE (arg0) == MULT_EXPR)))
10109 {
10110 tree parg0, parg1, parg, marg;
10111 enum tree_code pcode;
10112
10113 if (TREE_CODE (arg1) == MULT_EXPR)
10114 parg = arg0, marg = arg1;
10115 else
10116 parg = arg1, marg = arg0;
10117 pcode = TREE_CODE (parg);
10118 parg0 = TREE_OPERAND (parg, 0);
10119 parg1 = TREE_OPERAND (parg, 1);
10120 STRIP_NOPS (parg0);
10121 STRIP_NOPS (parg1);
10122
10123 if (TREE_CODE (parg0) == MULT_EXPR
10124 && TREE_CODE (parg1) != MULT_EXPR)
10125 return fold_build2_loc (loc, pcode, type,
10126 fold_build2_loc (loc, PLUS_EXPR, type,
10127 fold_convert_loc (loc, type,
10128 parg0),
10129 fold_convert_loc (loc, type,
10130 marg)),
10131 fold_convert_loc (loc, type, parg1));
10132 if (TREE_CODE (parg0) != MULT_EXPR
10133 && TREE_CODE (parg1) == MULT_EXPR)
10134 return
10135 fold_build2_loc (loc, PLUS_EXPR, type,
10136 fold_convert_loc (loc, type, parg0),
10137 fold_build2_loc (loc, pcode, type,
10138 fold_convert_loc (loc, type, marg),
10139 fold_convert_loc (loc, type,
10140 parg1)));
10141 }
10142 }
10143 else
10144 {
10145 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10146 to __complex__ ( x, y ). This is not the same for SNaNs or
10147 if signed zeros are involved. */
10148 if (!HONOR_SNANS (element_mode (arg0))
10149 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10150 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10151 {
10152 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10153 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10154 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10155 bool arg0rz = false, arg0iz = false;
10156 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10157 || (arg0i && (arg0iz = real_zerop (arg0i))))
10158 {
10159 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10160 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10161 if (arg0rz && arg1i && real_zerop (arg1i))
10162 {
10163 tree rp = arg1r ? arg1r
10164 : build1 (REALPART_EXPR, rtype, arg1);
10165 tree ip = arg0i ? arg0i
10166 : build1 (IMAGPART_EXPR, rtype, arg0);
10167 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10168 }
10169 else if (arg0iz && arg1r && real_zerop (arg1r))
10170 {
10171 tree rp = arg0r ? arg0r
10172 : build1 (REALPART_EXPR, rtype, arg0);
10173 tree ip = arg1i ? arg1i
10174 : build1 (IMAGPART_EXPR, rtype, arg1);
10175 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10176 }
10177 }
10178 }
10179
10180 if (flag_unsafe_math_optimizations
10181 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10182 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10183 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10184 return tem;
10185
10186 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10187 We associate floats only if the user has specified
10188 -fassociative-math. */
10189 if (flag_associative_math
10190 && TREE_CODE (arg1) == PLUS_EXPR
10191 && TREE_CODE (arg0) != MULT_EXPR)
10192 {
10193 tree tree10 = TREE_OPERAND (arg1, 0);
10194 tree tree11 = TREE_OPERAND (arg1, 1);
10195 if (TREE_CODE (tree11) == MULT_EXPR
10196 && TREE_CODE (tree10) == MULT_EXPR)
10197 {
10198 tree tree0;
10199 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10200 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10201 }
10202 }
10203 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10204 We associate floats only if the user has specified
10205 -fassociative-math. */
10206 if (flag_associative_math
10207 && TREE_CODE (arg0) == PLUS_EXPR
10208 && TREE_CODE (arg1) != MULT_EXPR)
10209 {
10210 tree tree00 = TREE_OPERAND (arg0, 0);
10211 tree tree01 = TREE_OPERAND (arg0, 1);
10212 if (TREE_CODE (tree01) == MULT_EXPR
10213 && TREE_CODE (tree00) == MULT_EXPR)
10214 {
10215 tree tree0;
10216 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10217 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10218 }
10219 }
10220 }
10221
10222 bit_rotate:
10223 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10224 is a rotate of A by C1 bits. */
10225 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10226 is a rotate of A by B bits. */
10227 {
10228 enum tree_code code0, code1;
10229 tree rtype;
10230 code0 = TREE_CODE (arg0);
10231 code1 = TREE_CODE (arg1);
10232 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10233 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10234 && operand_equal_p (TREE_OPERAND (arg0, 0),
10235 TREE_OPERAND (arg1, 0), 0)
10236 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10237 TYPE_UNSIGNED (rtype))
10238 /* Only create rotates in complete modes. Other cases are not
10239 expanded properly. */
10240 && (element_precision (rtype)
10241 == element_precision (TYPE_MODE (rtype))))
10242 {
10243 tree tree01, tree11;
10244 enum tree_code code01, code11;
10245
10246 tree01 = TREE_OPERAND (arg0, 1);
10247 tree11 = TREE_OPERAND (arg1, 1);
10248 STRIP_NOPS (tree01);
10249 STRIP_NOPS (tree11);
10250 code01 = TREE_CODE (tree01);
10251 code11 = TREE_CODE (tree11);
10252 if (code01 == INTEGER_CST
10253 && code11 == INTEGER_CST
10254 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10255 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10256 {
10257 tem = build2_loc (loc, LROTATE_EXPR,
10258 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10259 TREE_OPERAND (arg0, 0),
10260 code0 == LSHIFT_EXPR ? tree01 : tree11);
10261 return fold_convert_loc (loc, type, tem);
10262 }
10263 else if (code11 == MINUS_EXPR)
10264 {
10265 tree tree110, tree111;
10266 tree110 = TREE_OPERAND (tree11, 0);
10267 tree111 = TREE_OPERAND (tree11, 1);
10268 STRIP_NOPS (tree110);
10269 STRIP_NOPS (tree111);
10270 if (TREE_CODE (tree110) == INTEGER_CST
10271 && 0 == compare_tree_int (tree110,
10272 element_precision
10273 (TREE_TYPE (TREE_OPERAND
10274 (arg0, 0))))
10275 && operand_equal_p (tree01, tree111, 0))
10276 return
10277 fold_convert_loc (loc, type,
10278 build2 ((code0 == LSHIFT_EXPR
10279 ? LROTATE_EXPR
10280 : RROTATE_EXPR),
10281 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10282 TREE_OPERAND (arg0, 0), tree01));
10283 }
10284 else if (code01 == MINUS_EXPR)
10285 {
10286 tree tree010, tree011;
10287 tree010 = TREE_OPERAND (tree01, 0);
10288 tree011 = TREE_OPERAND (tree01, 1);
10289 STRIP_NOPS (tree010);
10290 STRIP_NOPS (tree011);
10291 if (TREE_CODE (tree010) == INTEGER_CST
10292 && 0 == compare_tree_int (tree010,
10293 element_precision
10294 (TREE_TYPE (TREE_OPERAND
10295 (arg0, 0))))
10296 && operand_equal_p (tree11, tree011, 0))
10297 return fold_convert_loc
10298 (loc, type,
10299 build2 ((code0 != LSHIFT_EXPR
10300 ? LROTATE_EXPR
10301 : RROTATE_EXPR),
10302 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10303 TREE_OPERAND (arg0, 0), tree11));
10304 }
10305 }
10306 }
10307
10308 associate:
10309 /* In most languages, can't associate operations on floats through
10310 parentheses. Rather than remember where the parentheses were, we
10311 don't associate floats at all, unless the user has specified
10312 -fassociative-math.
10313 And, we need to make sure type is not saturating. */
10314
10315 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10316 && !TYPE_SATURATING (type))
10317 {
10318 tree var0, con0, lit0, minus_lit0;
10319 tree var1, con1, lit1, minus_lit1;
10320 tree atype = type;
10321 bool ok = true;
10322
10323 /* Split both trees into variables, constants, and literals. Then
10324 associate each group together, the constants with literals,
10325 then the result with variables. This increases the chances of
10326 literals being recombined later and of generating relocatable
10327 expressions for the sum of a constant and literal. */
10328 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10329 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10330 code == MINUS_EXPR);
10331
10332 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10333 if (code == MINUS_EXPR)
10334 code = PLUS_EXPR;
10335
10336 /* With undefined overflow prefer doing association in a type
10337 which wraps on overflow, if that is one of the operand types. */
10338 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10339 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10340 {
10341 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10342 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10343 atype = TREE_TYPE (arg0);
10344 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10345 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10346 atype = TREE_TYPE (arg1);
10347 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10348 }
10349
10350 /* With undefined overflow we can only associate constants with one
10351 variable, and constants whose association doesn't overflow. */
10352 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10353 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10354 {
10355 if (var0 && var1)
10356 {
10357 tree tmp0 = var0;
10358 tree tmp1 = var1;
10359
10360 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10361 tmp0 = TREE_OPERAND (tmp0, 0);
10362 if (CONVERT_EXPR_P (tmp0)
10363 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10364 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10365 <= TYPE_PRECISION (atype)))
10366 tmp0 = TREE_OPERAND (tmp0, 0);
10367 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10368 tmp1 = TREE_OPERAND (tmp1, 0);
10369 if (CONVERT_EXPR_P (tmp1)
10370 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10371 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10372 <= TYPE_PRECISION (atype)))
10373 tmp1 = TREE_OPERAND (tmp1, 0);
10374 /* The only case we can still associate with two variables
10375 is if they are the same, modulo negation and bit-pattern
10376 preserving conversions. */
10377 if (!operand_equal_p (tmp0, tmp1, 0))
10378 ok = false;
10379 }
10380 }
10381
10382 /* Only do something if we found more than two objects. Otherwise,
10383 nothing has changed and we risk infinite recursion. */
10384 if (ok
10385 && (2 < ((var0 != 0) + (var1 != 0)
10386 + (con0 != 0) + (con1 != 0)
10387 + (lit0 != 0) + (lit1 != 0)
10388 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10389 {
10390 bool any_overflows = false;
10391 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10392 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10393 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10394 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10395 var0 = associate_trees (loc, var0, var1, code, atype);
10396 con0 = associate_trees (loc, con0, con1, code, atype);
10397 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10398 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10399 code, atype);
10400
10401 /* Preserve the MINUS_EXPR if the negative part of the literal is
10402 greater than the positive part. Otherwise, the multiplicative
10403 folding code (i.e extract_muldiv) may be fooled in case
10404 unsigned constants are subtracted, like in the following
10405 example: ((X*2 + 4) - 8U)/2. */
10406 if (minus_lit0 && lit0)
10407 {
10408 if (TREE_CODE (lit0) == INTEGER_CST
10409 && TREE_CODE (minus_lit0) == INTEGER_CST
10410 && tree_int_cst_lt (lit0, minus_lit0))
10411 {
10412 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10413 MINUS_EXPR, atype);
10414 lit0 = 0;
10415 }
10416 else
10417 {
10418 lit0 = associate_trees (loc, lit0, minus_lit0,
10419 MINUS_EXPR, atype);
10420 minus_lit0 = 0;
10421 }
10422 }
10423
10424 /* Don't introduce overflows through reassociation. */
10425 if (!any_overflows
10426 && ((lit0 && TREE_OVERFLOW_P (lit0))
10427 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10428 return NULL_TREE;
10429
10430 if (minus_lit0)
10431 {
10432 if (con0 == 0)
10433 return
10434 fold_convert_loc (loc, type,
10435 associate_trees (loc, var0, minus_lit0,
10436 MINUS_EXPR, atype));
10437 else
10438 {
10439 con0 = associate_trees (loc, con0, minus_lit0,
10440 MINUS_EXPR, atype);
10441 return
10442 fold_convert_loc (loc, type,
10443 associate_trees (loc, var0, con0,
10444 PLUS_EXPR, atype));
10445 }
10446 }
10447
10448 con0 = associate_trees (loc, con0, lit0, code, atype);
10449 return
10450 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10451 code, atype));
10452 }
10453 }
10454
10455 return NULL_TREE;
10456
10457 case MINUS_EXPR:
10458 /* Pointer simplifications for subtraction, simple reassociations. */
10459 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10460 {
10461 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10462 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10463 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10464 {
10465 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10466 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10467 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10468 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10469 return fold_build2_loc (loc, PLUS_EXPR, type,
10470 fold_build2_loc (loc, MINUS_EXPR, type,
10471 arg00, arg10),
10472 fold_build2_loc (loc, MINUS_EXPR, type,
10473 arg01, arg11));
10474 }
10475 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10476 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10477 {
10478 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10479 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10480 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10481 fold_convert_loc (loc, type, arg1));
10482 if (tmp)
10483 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10484 }
10485 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10486 simplifies. */
10487 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10488 {
10489 tree arg10 = fold_convert_loc (loc, type,
10490 TREE_OPERAND (arg1, 0));
10491 tree arg11 = fold_convert_loc (loc, type,
10492 TREE_OPERAND (arg1, 1));
10493 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10494 fold_convert_loc (loc, type, arg0),
10495 arg10);
10496 if (tmp)
10497 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10498 }
10499 }
10500 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10501 if (TREE_CODE (arg0) == NEGATE_EXPR
10502 && negate_expr_p (arg1)
10503 && reorder_operands_p (arg0, arg1))
10504 return fold_build2_loc (loc, MINUS_EXPR, type,
10505 fold_convert_loc (loc, type,
10506 negate_expr (arg1)),
10507 fold_convert_loc (loc, type,
10508 TREE_OPERAND (arg0, 0)));
10509
10510 /* X - (X / Y) * Y is X % Y. */
10511 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10512 && TREE_CODE (arg1) == MULT_EXPR
10513 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10514 && operand_equal_p (arg0,
10515 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10516 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10517 TREE_OPERAND (arg1, 1), 0))
10518 return
10519 fold_convert_loc (loc, type,
10520 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10521 arg0, TREE_OPERAND (arg1, 1)));
10522
10523 if (! FLOAT_TYPE_P (type))
10524 {
10525 /* Fold A - (A & B) into ~B & A. */
10526 if (!TREE_SIDE_EFFECTS (arg0)
10527 && TREE_CODE (arg1) == BIT_AND_EXPR)
10528 {
10529 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10530 {
10531 tree arg10 = fold_convert_loc (loc, type,
10532 TREE_OPERAND (arg1, 0));
10533 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10534 fold_build1_loc (loc, BIT_NOT_EXPR,
10535 type, arg10),
10536 fold_convert_loc (loc, type, arg0));
10537 }
10538 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10539 {
10540 tree arg11 = fold_convert_loc (loc,
10541 type, TREE_OPERAND (arg1, 1));
10542 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10543 fold_build1_loc (loc, BIT_NOT_EXPR,
10544 type, arg11),
10545 fold_convert_loc (loc, type, arg0));
10546 }
10547 }
10548
10549 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10550 any power of 2 minus 1. */
10551 if (TREE_CODE (arg0) == BIT_AND_EXPR
10552 && TREE_CODE (arg1) == BIT_AND_EXPR
10553 && operand_equal_p (TREE_OPERAND (arg0, 0),
10554 TREE_OPERAND (arg1, 0), 0))
10555 {
10556 tree mask0 = TREE_OPERAND (arg0, 1);
10557 tree mask1 = TREE_OPERAND (arg1, 1);
10558 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10559
10560 if (operand_equal_p (tem, mask1, 0))
10561 {
10562 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10563 TREE_OPERAND (arg0, 0), mask1);
10564 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10565 }
10566 }
10567 }
10568
10569 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10570 __complex__ ( x, -y ). This is not the same for SNaNs or if
10571 signed zeros are involved. */
10572 if (!HONOR_SNANS (element_mode (arg0))
10573 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10574 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10575 {
10576 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10577 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10578 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10579 bool arg0rz = false, arg0iz = false;
10580 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10581 || (arg0i && (arg0iz = real_zerop (arg0i))))
10582 {
10583 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10584 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10585 if (arg0rz && arg1i && real_zerop (arg1i))
10586 {
10587 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10588 arg1r ? arg1r
10589 : build1 (REALPART_EXPR, rtype, arg1));
10590 tree ip = arg0i ? arg0i
10591 : build1 (IMAGPART_EXPR, rtype, arg0);
10592 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10593 }
10594 else if (arg0iz && arg1r && real_zerop (arg1r))
10595 {
10596 tree rp = arg0r ? arg0r
10597 : build1 (REALPART_EXPR, rtype, arg0);
10598 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10599 arg1i ? arg1i
10600 : build1 (IMAGPART_EXPR, rtype, arg1));
10601 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10602 }
10603 }
10604 }
10605
10606 /* A - B -> A + (-B) if B is easily negatable. */
10607 if (negate_expr_p (arg1)
10608 && !TYPE_OVERFLOW_SANITIZED (type)
10609 && ((FLOAT_TYPE_P (type)
10610 /* Avoid this transformation if B is a positive REAL_CST. */
10611 && (TREE_CODE (arg1) != REAL_CST
10612 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10613 || INTEGRAL_TYPE_P (type)))
10614 return fold_build2_loc (loc, PLUS_EXPR, type,
10615 fold_convert_loc (loc, type, arg0),
10616 fold_convert_loc (loc, type,
10617 negate_expr (arg1)));
10618
10619 /* Try folding difference of addresses. */
10620 {
10621 HOST_WIDE_INT diff;
10622
10623 if ((TREE_CODE (arg0) == ADDR_EXPR
10624 || TREE_CODE (arg1) == ADDR_EXPR)
10625 && ptr_difference_const (arg0, arg1, &diff))
10626 return build_int_cst_type (type, diff);
10627 }
10628
10629 /* Fold &a[i] - &a[j] to i-j. */
10630 if (TREE_CODE (arg0) == ADDR_EXPR
10631 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10632 && TREE_CODE (arg1) == ADDR_EXPR
10633 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10634 {
10635 tree tem = fold_addr_of_array_ref_difference (loc, type,
10636 TREE_OPERAND (arg0, 0),
10637 TREE_OPERAND (arg1, 0));
10638 if (tem)
10639 return tem;
10640 }
10641
10642 if (FLOAT_TYPE_P (type)
10643 && flag_unsafe_math_optimizations
10644 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10645 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10646 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10647 return tem;
10648
10649 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10650 one. Make sure the type is not saturating and has the signedness of
10651 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10652 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10653 if ((TREE_CODE (arg0) == MULT_EXPR
10654 || TREE_CODE (arg1) == MULT_EXPR)
10655 && !TYPE_SATURATING (type)
10656 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10657 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10658 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10659 {
10660 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10661 if (tem)
10662 return tem;
10663 }
10664
10665 goto associate;
10666
10667 case MULT_EXPR:
10668 /* (-A) * (-B) -> A * B */
10669 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10670 return fold_build2_loc (loc, MULT_EXPR, type,
10671 fold_convert_loc (loc, type,
10672 TREE_OPERAND (arg0, 0)),
10673 fold_convert_loc (loc, type,
10674 negate_expr (arg1)));
10675 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10676 return fold_build2_loc (loc, MULT_EXPR, type,
10677 fold_convert_loc (loc, type,
10678 negate_expr (arg0)),
10679 fold_convert_loc (loc, type,
10680 TREE_OPERAND (arg1, 0)));
10681
10682 if (! FLOAT_TYPE_P (type))
10683 {
10684 /* Transform x * -C into -x * C if x is easily negatable. */
10685 if (TREE_CODE (arg1) == INTEGER_CST
10686 && tree_int_cst_sgn (arg1) == -1
10687 && negate_expr_p (arg0)
10688 && (tem = negate_expr (arg1)) != arg1
10689 && !TREE_OVERFLOW (tem))
10690 return fold_build2_loc (loc, MULT_EXPR, type,
10691 fold_convert_loc (loc, type,
10692 negate_expr (arg0)),
10693 tem);
10694
10695 /* (a * (1 << b)) is (a << b) */
10696 if (TREE_CODE (arg1) == LSHIFT_EXPR
10697 && integer_onep (TREE_OPERAND (arg1, 0)))
10698 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10699 TREE_OPERAND (arg1, 1));
10700 if (TREE_CODE (arg0) == LSHIFT_EXPR
10701 && integer_onep (TREE_OPERAND (arg0, 0)))
10702 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10703 TREE_OPERAND (arg0, 1));
10704
10705 /* (A + A) * C -> A * 2 * C */
10706 if (TREE_CODE (arg0) == PLUS_EXPR
10707 && TREE_CODE (arg1) == INTEGER_CST
10708 && operand_equal_p (TREE_OPERAND (arg0, 0),
10709 TREE_OPERAND (arg0, 1), 0))
10710 return fold_build2_loc (loc, MULT_EXPR, type,
10711 omit_one_operand_loc (loc, type,
10712 TREE_OPERAND (arg0, 0),
10713 TREE_OPERAND (arg0, 1)),
10714 fold_build2_loc (loc, MULT_EXPR, type,
10715 build_int_cst (type, 2) , arg1));
10716
10717 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10718 sign-changing only. */
10719 if (TREE_CODE (arg1) == INTEGER_CST
10720 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10721 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10722 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10723
10724 strict_overflow_p = false;
10725 if (TREE_CODE (arg1) == INTEGER_CST
10726 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10727 &strict_overflow_p)))
10728 {
10729 if (strict_overflow_p)
10730 fold_overflow_warning (("assuming signed overflow does not "
10731 "occur when simplifying "
10732 "multiplication"),
10733 WARN_STRICT_OVERFLOW_MISC);
10734 return fold_convert_loc (loc, type, tem);
10735 }
10736
10737 /* Optimize z * conj(z) for integer complex numbers. */
10738 if (TREE_CODE (arg0) == CONJ_EXPR
10739 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10740 return fold_mult_zconjz (loc, type, arg1);
10741 if (TREE_CODE (arg1) == CONJ_EXPR
10742 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10743 return fold_mult_zconjz (loc, type, arg0);
10744 }
10745 else
10746 {
10747 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10748 the result for floating point types due to rounding so it is applied
10749 only if -fassociative-math was specify. */
10750 if (flag_associative_math
10751 && TREE_CODE (arg0) == RDIV_EXPR
10752 && TREE_CODE (arg1) == REAL_CST
10753 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10754 {
10755 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10756 arg1);
10757 if (tem)
10758 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10759 TREE_OPERAND (arg0, 1));
10760 }
10761
10762 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10763 if (operand_equal_p (arg0, arg1, 0))
10764 {
10765 tree tem = fold_strip_sign_ops (arg0);
10766 if (tem != NULL_TREE)
10767 {
10768 tem = fold_convert_loc (loc, type, tem);
10769 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10770 }
10771 }
10772
10773 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10774 This is not the same for NaNs or if signed zeros are
10775 involved. */
10776 if (!HONOR_NANS (arg0)
10777 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10778 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10779 && TREE_CODE (arg1) == COMPLEX_CST
10780 && real_zerop (TREE_REALPART (arg1)))
10781 {
10782 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10783 if (real_onep (TREE_IMAGPART (arg1)))
10784 return
10785 fold_build2_loc (loc, COMPLEX_EXPR, type,
10786 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10787 rtype, arg0)),
10788 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10789 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10790 return
10791 fold_build2_loc (loc, COMPLEX_EXPR, type,
10792 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10793 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10794 rtype, arg0)));
10795 }
10796
10797 /* Optimize z * conj(z) for floating point complex numbers.
10798 Guarded by flag_unsafe_math_optimizations as non-finite
10799 imaginary components don't produce scalar results. */
10800 if (flag_unsafe_math_optimizations
10801 && TREE_CODE (arg0) == CONJ_EXPR
10802 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10803 return fold_mult_zconjz (loc, type, arg1);
10804 if (flag_unsafe_math_optimizations
10805 && TREE_CODE (arg1) == CONJ_EXPR
10806 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10807 return fold_mult_zconjz (loc, type, arg0);
10808
10809 if (flag_unsafe_math_optimizations)
10810 {
10811 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10812 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10813
10814 /* Optimizations of root(...)*root(...). */
10815 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10816 {
10817 tree rootfn, arg;
10818 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10819 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10820
10821 /* Optimize sqrt(x)*sqrt(x) as x. */
10822 if (BUILTIN_SQRT_P (fcode0)
10823 && operand_equal_p (arg00, arg10, 0)
10824 && ! HONOR_SNANS (element_mode (type)))
10825 return arg00;
10826
10827 /* Optimize root(x)*root(y) as root(x*y). */
10828 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10829 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10830 return build_call_expr_loc (loc, rootfn, 1, arg);
10831 }
10832
10833 /* Optimize expN(x)*expN(y) as expN(x+y). */
10834 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10835 {
10836 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10837 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10838 CALL_EXPR_ARG (arg0, 0),
10839 CALL_EXPR_ARG (arg1, 0));
10840 return build_call_expr_loc (loc, expfn, 1, arg);
10841 }
10842
10843 /* Optimizations of pow(...)*pow(...). */
10844 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10845 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10846 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10847 {
10848 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10849 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10850 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10851 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10852
10853 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10854 if (operand_equal_p (arg01, arg11, 0))
10855 {
10856 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10857 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10858 arg00, arg10);
10859 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10860 }
10861
10862 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10863 if (operand_equal_p (arg00, arg10, 0))
10864 {
10865 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10866 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10867 arg01, arg11);
10868 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10869 }
10870 }
10871
10872 /* Optimize tan(x)*cos(x) as sin(x). */
10873 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10874 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10875 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10876 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10877 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10878 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10879 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10880 CALL_EXPR_ARG (arg1, 0), 0))
10881 {
10882 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10883
10884 if (sinfn != NULL_TREE)
10885 return build_call_expr_loc (loc, sinfn, 1,
10886 CALL_EXPR_ARG (arg0, 0));
10887 }
10888
10889 /* Optimize x*pow(x,c) as pow(x,c+1). */
10890 if (fcode1 == BUILT_IN_POW
10891 || fcode1 == BUILT_IN_POWF
10892 || fcode1 == BUILT_IN_POWL)
10893 {
10894 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10895 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10896 if (TREE_CODE (arg11) == REAL_CST
10897 && !TREE_OVERFLOW (arg11)
10898 && operand_equal_p (arg0, arg10, 0))
10899 {
10900 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10901 REAL_VALUE_TYPE c;
10902 tree arg;
10903
10904 c = TREE_REAL_CST (arg11);
10905 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10906 arg = build_real (type, c);
10907 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10908 }
10909 }
10910
10911 /* Optimize pow(x,c)*x as pow(x,c+1). */
10912 if (fcode0 == BUILT_IN_POW
10913 || fcode0 == BUILT_IN_POWF
10914 || fcode0 == BUILT_IN_POWL)
10915 {
10916 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10917 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10918 if (TREE_CODE (arg01) == REAL_CST
10919 && !TREE_OVERFLOW (arg01)
10920 && operand_equal_p (arg1, arg00, 0))
10921 {
10922 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10923 REAL_VALUE_TYPE c;
10924 tree arg;
10925
10926 c = TREE_REAL_CST (arg01);
10927 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10928 arg = build_real (type, c);
10929 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10930 }
10931 }
10932
10933 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10934 if (!in_gimple_form
10935 && optimize
10936 && operand_equal_p (arg0, arg1, 0))
10937 {
10938 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10939
10940 if (powfn)
10941 {
10942 tree arg = build_real (type, dconst2);
10943 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10944 }
10945 }
10946 }
10947 }
10948 goto associate;
10949
10950 case BIT_IOR_EXPR:
10951 bit_ior:
10952 /* ~X | X is -1. */
10953 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10954 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10955 {
10956 t1 = build_zero_cst (type);
10957 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10958 return omit_one_operand_loc (loc, type, t1, arg1);
10959 }
10960
10961 /* X | ~X is -1. */
10962 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10963 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10964 {
10965 t1 = build_zero_cst (type);
10966 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10967 return omit_one_operand_loc (loc, type, t1, arg0);
10968 }
10969
10970 /* Canonicalize (X & C1) | C2. */
10971 if (TREE_CODE (arg0) == BIT_AND_EXPR
10972 && TREE_CODE (arg1) == INTEGER_CST
10973 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10974 {
10975 int width = TYPE_PRECISION (type), w;
10976 wide_int c1 = TREE_OPERAND (arg0, 1);
10977 wide_int c2 = arg1;
10978
10979 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10980 if ((c1 & c2) == c1)
10981 return omit_one_operand_loc (loc, type, arg1,
10982 TREE_OPERAND (arg0, 0));
10983
10984 wide_int msk = wi::mask (width, false,
10985 TYPE_PRECISION (TREE_TYPE (arg1)));
10986
10987 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10988 if (msk.and_not (c1 | c2) == 0)
10989 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10990 TREE_OPERAND (arg0, 0), arg1);
10991
10992 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10993 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10994 mode which allows further optimizations. */
10995 c1 &= msk;
10996 c2 &= msk;
10997 wide_int c3 = c1.and_not (c2);
10998 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10999 {
11000 wide_int mask = wi::mask (w, false,
11001 TYPE_PRECISION (type));
11002 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11003 {
11004 c3 = mask;
11005 break;
11006 }
11007 }
11008
11009 if (c3 != c1)
11010 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11011 fold_build2_loc (loc, BIT_AND_EXPR, type,
11012 TREE_OPERAND (arg0, 0),
11013 wide_int_to_tree (type,
11014 c3)),
11015 arg1);
11016 }
11017
11018 /* (X & ~Y) | (~X & Y) is X ^ Y */
11019 if (TREE_CODE (arg0) == BIT_AND_EXPR
11020 && TREE_CODE (arg1) == BIT_AND_EXPR)
11021 {
11022 tree a0, a1, l0, l1, n0, n1;
11023
11024 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11025 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11026
11027 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11028 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11029
11030 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11031 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11032
11033 if ((operand_equal_p (n0, a0, 0)
11034 && operand_equal_p (n1, a1, 0))
11035 || (operand_equal_p (n0, a1, 0)
11036 && operand_equal_p (n1, a0, 0)))
11037 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11038 }
11039
11040 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11041 if (t1 != NULL_TREE)
11042 return t1;
11043
11044 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11045
11046 This results in more efficient code for machines without a NAND
11047 instruction. Combine will canonicalize to the first form
11048 which will allow use of NAND instructions provided by the
11049 backend if they exist. */
11050 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11051 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11052 {
11053 return
11054 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11055 build2 (BIT_AND_EXPR, type,
11056 fold_convert_loc (loc, type,
11057 TREE_OPERAND (arg0, 0)),
11058 fold_convert_loc (loc, type,
11059 TREE_OPERAND (arg1, 0))));
11060 }
11061
11062 /* See if this can be simplified into a rotate first. If that
11063 is unsuccessful continue in the association code. */
11064 goto bit_rotate;
11065
11066 case BIT_XOR_EXPR:
11067 /* ~X ^ X is -1. */
11068 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11069 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11070 {
11071 t1 = build_zero_cst (type);
11072 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11073 return omit_one_operand_loc (loc, type, t1, arg1);
11074 }
11075
11076 /* X ^ ~X is -1. */
11077 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11078 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11079 {
11080 t1 = build_zero_cst (type);
11081 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11082 return omit_one_operand_loc (loc, type, t1, arg0);
11083 }
11084
11085 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11086 with a constant, and the two constants have no bits in common,
11087 we should treat this as a BIT_IOR_EXPR since this may produce more
11088 simplifications. */
11089 if (TREE_CODE (arg0) == BIT_AND_EXPR
11090 && TREE_CODE (arg1) == BIT_AND_EXPR
11091 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11092 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11093 && wi::bit_and (TREE_OPERAND (arg0, 1),
11094 TREE_OPERAND (arg1, 1)) == 0)
11095 {
11096 code = BIT_IOR_EXPR;
11097 goto bit_ior;
11098 }
11099
11100 /* (X | Y) ^ X -> Y & ~ X*/
11101 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11102 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11103 {
11104 tree t2 = TREE_OPERAND (arg0, 1);
11105 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11106 arg1);
11107 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11108 fold_convert_loc (loc, type, t2),
11109 fold_convert_loc (loc, type, t1));
11110 return t1;
11111 }
11112
11113 /* (Y | X) ^ X -> Y & ~ X*/
11114 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11115 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11116 {
11117 tree t2 = TREE_OPERAND (arg0, 0);
11118 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11119 arg1);
11120 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11121 fold_convert_loc (loc, type, t2),
11122 fold_convert_loc (loc, type, t1));
11123 return t1;
11124 }
11125
11126 /* X ^ (X | Y) -> Y & ~ X*/
11127 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11128 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11129 {
11130 tree t2 = TREE_OPERAND (arg1, 1);
11131 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11132 arg0);
11133 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11134 fold_convert_loc (loc, type, t2),
11135 fold_convert_loc (loc, type, t1));
11136 return t1;
11137 }
11138
11139 /* X ^ (Y | X) -> Y & ~ X*/
11140 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11141 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11142 {
11143 tree t2 = TREE_OPERAND (arg1, 0);
11144 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11145 arg0);
11146 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11147 fold_convert_loc (loc, type, t2),
11148 fold_convert_loc (loc, type, t1));
11149 return t1;
11150 }
11151
11152 /* Convert ~X ^ ~Y to X ^ Y. */
11153 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11154 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11155 return fold_build2_loc (loc, code, type,
11156 fold_convert_loc (loc, type,
11157 TREE_OPERAND (arg0, 0)),
11158 fold_convert_loc (loc, type,
11159 TREE_OPERAND (arg1, 0)));
11160
11161 /* Convert ~X ^ C to X ^ ~C. */
11162 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11163 && TREE_CODE (arg1) == INTEGER_CST)
11164 return fold_build2_loc (loc, code, type,
11165 fold_convert_loc (loc, type,
11166 TREE_OPERAND (arg0, 0)),
11167 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11168
11169 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11170 if (TREE_CODE (arg0) == BIT_AND_EXPR
11171 && INTEGRAL_TYPE_P (type)
11172 && integer_onep (TREE_OPERAND (arg0, 1))
11173 && integer_onep (arg1))
11174 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11175 build_zero_cst (TREE_TYPE (arg0)));
11176
11177 /* Fold (X & Y) ^ Y as ~X & Y. */
11178 if (TREE_CODE (arg0) == BIT_AND_EXPR
11179 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11180 {
11181 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11182 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11183 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11184 fold_convert_loc (loc, type, arg1));
11185 }
11186 /* Fold (X & Y) ^ X as ~Y & X. */
11187 if (TREE_CODE (arg0) == BIT_AND_EXPR
11188 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11189 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11190 {
11191 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11192 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11193 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11194 fold_convert_loc (loc, type, arg1));
11195 }
11196 /* Fold X ^ (X & Y) as X & ~Y. */
11197 if (TREE_CODE (arg1) == BIT_AND_EXPR
11198 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11199 {
11200 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11201 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11202 fold_convert_loc (loc, type, arg0),
11203 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11204 }
11205 /* Fold X ^ (Y & X) as ~Y & X. */
11206 if (TREE_CODE (arg1) == BIT_AND_EXPR
11207 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11208 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11209 {
11210 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11211 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11212 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11213 fold_convert_loc (loc, type, arg0));
11214 }
11215
11216 /* See if this can be simplified into a rotate first. If that
11217 is unsuccessful continue in the association code. */
11218 goto bit_rotate;
11219
11220 case BIT_AND_EXPR:
11221 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11222 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11223 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11224 || (TREE_CODE (arg0) == EQ_EXPR
11225 && integer_zerop (TREE_OPERAND (arg0, 1))))
11226 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11227 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11228
11229 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11230 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11231 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11232 || (TREE_CODE (arg1) == EQ_EXPR
11233 && integer_zerop (TREE_OPERAND (arg1, 1))))
11234 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11235 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11236
11237 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11238 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11239 && INTEGRAL_TYPE_P (type)
11240 && integer_onep (TREE_OPERAND (arg0, 1))
11241 && integer_onep (arg1))
11242 {
11243 tree tem2;
11244 tem = TREE_OPERAND (arg0, 0);
11245 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11246 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11247 tem, tem2);
11248 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11249 build_zero_cst (TREE_TYPE (tem)));
11250 }
11251 /* Fold ~X & 1 as (X & 1) == 0. */
11252 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11253 && INTEGRAL_TYPE_P (type)
11254 && integer_onep (arg1))
11255 {
11256 tree tem2;
11257 tem = TREE_OPERAND (arg0, 0);
11258 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11259 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11260 tem, tem2);
11261 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11262 build_zero_cst (TREE_TYPE (tem)));
11263 }
11264 /* Fold !X & 1 as X == 0. */
11265 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11266 && integer_onep (arg1))
11267 {
11268 tem = TREE_OPERAND (arg0, 0);
11269 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11270 build_zero_cst (TREE_TYPE (tem)));
11271 }
11272
11273 /* Fold (X ^ Y) & Y as ~X & Y. */
11274 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11275 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11276 {
11277 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11278 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11279 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11280 fold_convert_loc (loc, type, arg1));
11281 }
11282 /* Fold (X ^ Y) & X as ~Y & X. */
11283 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11284 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11285 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11286 {
11287 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11288 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11289 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11290 fold_convert_loc (loc, type, arg1));
11291 }
11292 /* Fold X & (X ^ Y) as X & ~Y. */
11293 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11294 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11295 {
11296 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11297 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11298 fold_convert_loc (loc, type, arg0),
11299 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11300 }
11301 /* Fold X & (Y ^ X) as ~Y & X. */
11302 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11303 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11304 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11305 {
11306 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11307 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11308 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11309 fold_convert_loc (loc, type, arg0));
11310 }
11311
11312 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11313 multiple of 1 << CST. */
11314 if (TREE_CODE (arg1) == INTEGER_CST)
11315 {
11316 wide_int cst1 = arg1;
11317 wide_int ncst1 = -cst1;
11318 if ((cst1 & ncst1) == ncst1
11319 && multiple_of_p (type, arg0,
11320 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11321 return fold_convert_loc (loc, type, arg0);
11322 }
11323
11324 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11325 bits from CST2. */
11326 if (TREE_CODE (arg1) == INTEGER_CST
11327 && TREE_CODE (arg0) == MULT_EXPR
11328 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11329 {
11330 wide_int warg1 = arg1;
11331 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11332
11333 if (masked == 0)
11334 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11335 arg0, arg1);
11336 else if (masked != warg1)
11337 {
11338 /* Avoid the transform if arg1 is a mask of some
11339 mode which allows further optimizations. */
11340 int pop = wi::popcount (warg1);
11341 if (!(pop >= BITS_PER_UNIT
11342 && exact_log2 (pop) != -1
11343 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11344 return fold_build2_loc (loc, code, type, op0,
11345 wide_int_to_tree (type, masked));
11346 }
11347 }
11348
11349 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11350 ((A & N) + B) & M -> (A + B) & M
11351 Similarly if (N & M) == 0,
11352 ((A | N) + B) & M -> (A + B) & M
11353 and for - instead of + (or unary - instead of +)
11354 and/or ^ instead of |.
11355 If B is constant and (B & M) == 0, fold into A & M. */
11356 if (TREE_CODE (arg1) == INTEGER_CST)
11357 {
11358 wide_int cst1 = arg1;
11359 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11360 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11361 && (TREE_CODE (arg0) == PLUS_EXPR
11362 || TREE_CODE (arg0) == MINUS_EXPR
11363 || TREE_CODE (arg0) == NEGATE_EXPR)
11364 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11365 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11366 {
11367 tree pmop[2];
11368 int which = 0;
11369 wide_int cst0;
11370
11371 /* Now we know that arg0 is (C + D) or (C - D) or
11372 -C and arg1 (M) is == (1LL << cst) - 1.
11373 Store C into PMOP[0] and D into PMOP[1]. */
11374 pmop[0] = TREE_OPERAND (arg0, 0);
11375 pmop[1] = NULL;
11376 if (TREE_CODE (arg0) != NEGATE_EXPR)
11377 {
11378 pmop[1] = TREE_OPERAND (arg0, 1);
11379 which = 1;
11380 }
11381
11382 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11383 which = -1;
11384
11385 for (; which >= 0; which--)
11386 switch (TREE_CODE (pmop[which]))
11387 {
11388 case BIT_AND_EXPR:
11389 case BIT_IOR_EXPR:
11390 case BIT_XOR_EXPR:
11391 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11392 != INTEGER_CST)
11393 break;
11394 cst0 = TREE_OPERAND (pmop[which], 1);
11395 cst0 &= cst1;
11396 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11397 {
11398 if (cst0 != cst1)
11399 break;
11400 }
11401 else if (cst0 != 0)
11402 break;
11403 /* If C or D is of the form (A & N) where
11404 (N & M) == M, or of the form (A | N) or
11405 (A ^ N) where (N & M) == 0, replace it with A. */
11406 pmop[which] = TREE_OPERAND (pmop[which], 0);
11407 break;
11408 case INTEGER_CST:
11409 /* If C or D is a N where (N & M) == 0, it can be
11410 omitted (assumed 0). */
11411 if ((TREE_CODE (arg0) == PLUS_EXPR
11412 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11413 && (cst1 & pmop[which]) == 0)
11414 pmop[which] = NULL;
11415 break;
11416 default:
11417 break;
11418 }
11419
11420 /* Only build anything new if we optimized one or both arguments
11421 above. */
11422 if (pmop[0] != TREE_OPERAND (arg0, 0)
11423 || (TREE_CODE (arg0) != NEGATE_EXPR
11424 && pmop[1] != TREE_OPERAND (arg0, 1)))
11425 {
11426 tree utype = TREE_TYPE (arg0);
11427 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11428 {
11429 /* Perform the operations in a type that has defined
11430 overflow behavior. */
11431 utype = unsigned_type_for (TREE_TYPE (arg0));
11432 if (pmop[0] != NULL)
11433 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11434 if (pmop[1] != NULL)
11435 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11436 }
11437
11438 if (TREE_CODE (arg0) == NEGATE_EXPR)
11439 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11440 else if (TREE_CODE (arg0) == PLUS_EXPR)
11441 {
11442 if (pmop[0] != NULL && pmop[1] != NULL)
11443 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11444 pmop[0], pmop[1]);
11445 else if (pmop[0] != NULL)
11446 tem = pmop[0];
11447 else if (pmop[1] != NULL)
11448 tem = pmop[1];
11449 else
11450 return build_int_cst (type, 0);
11451 }
11452 else if (pmop[0] == NULL)
11453 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11454 else
11455 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11456 pmop[0], pmop[1]);
11457 /* TEM is now the new binary +, - or unary - replacement. */
11458 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11459 fold_convert_loc (loc, utype, arg1));
11460 return fold_convert_loc (loc, type, tem);
11461 }
11462 }
11463 }
11464
11465 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11466 if (t1 != NULL_TREE)
11467 return t1;
11468 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11469 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11470 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11471 {
11472 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11473
11474 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11475 if (mask == -1)
11476 return
11477 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11478 }
11479
11480 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11481
11482 This results in more efficient code for machines without a NOR
11483 instruction. Combine will canonicalize to the first form
11484 which will allow use of NOR instructions provided by the
11485 backend if they exist. */
11486 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11487 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11488 {
11489 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11490 build2 (BIT_IOR_EXPR, type,
11491 fold_convert_loc (loc, type,
11492 TREE_OPERAND (arg0, 0)),
11493 fold_convert_loc (loc, type,
11494 TREE_OPERAND (arg1, 0))));
11495 }
11496
11497 /* If arg0 is derived from the address of an object or function, we may
11498 be able to fold this expression using the object or function's
11499 alignment. */
11500 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11501 {
11502 unsigned HOST_WIDE_INT modulus, residue;
11503 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11504
11505 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11506 integer_onep (arg1));
11507
11508 /* This works because modulus is a power of 2. If this weren't the
11509 case, we'd have to replace it by its greatest power-of-2
11510 divisor: modulus & -modulus. */
11511 if (low < modulus)
11512 return build_int_cst (type, residue & low);
11513 }
11514
11515 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11516 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11517 if the new mask might be further optimized. */
11518 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11519 || TREE_CODE (arg0) == RSHIFT_EXPR)
11520 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11521 && TREE_CODE (arg1) == INTEGER_CST
11522 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11523 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11524 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11525 < TYPE_PRECISION (TREE_TYPE (arg0))))
11526 {
11527 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11528 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11529 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11530 tree shift_type = TREE_TYPE (arg0);
11531
11532 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11533 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11534 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11535 && TYPE_PRECISION (TREE_TYPE (arg0))
11536 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11537 {
11538 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11539 tree arg00 = TREE_OPERAND (arg0, 0);
11540 /* See if more bits can be proven as zero because of
11541 zero extension. */
11542 if (TREE_CODE (arg00) == NOP_EXPR
11543 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11544 {
11545 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11546 if (TYPE_PRECISION (inner_type)
11547 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11548 && TYPE_PRECISION (inner_type) < prec)
11549 {
11550 prec = TYPE_PRECISION (inner_type);
11551 /* See if we can shorten the right shift. */
11552 if (shiftc < prec)
11553 shift_type = inner_type;
11554 /* Otherwise X >> C1 is all zeros, so we'll optimize
11555 it into (X, 0) later on by making sure zerobits
11556 is all ones. */
11557 }
11558 }
11559 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11560 if (shiftc < prec)
11561 {
11562 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11563 zerobits <<= prec - shiftc;
11564 }
11565 /* For arithmetic shift if sign bit could be set, zerobits
11566 can contain actually sign bits, so no transformation is
11567 possible, unless MASK masks them all away. In that
11568 case the shift needs to be converted into logical shift. */
11569 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11570 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11571 {
11572 if ((mask & zerobits) == 0)
11573 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11574 else
11575 zerobits = 0;
11576 }
11577 }
11578
11579 /* ((X << 16) & 0xff00) is (X, 0). */
11580 if ((mask & zerobits) == mask)
11581 return omit_one_operand_loc (loc, type,
11582 build_int_cst (type, 0), arg0);
11583
11584 newmask = mask | zerobits;
11585 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11586 {
11587 /* Only do the transformation if NEWMASK is some integer
11588 mode's mask. */
11589 for (prec = BITS_PER_UNIT;
11590 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11591 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11592 break;
11593 if (prec < HOST_BITS_PER_WIDE_INT
11594 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11595 {
11596 tree newmaskt;
11597
11598 if (shift_type != TREE_TYPE (arg0))
11599 {
11600 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11601 fold_convert_loc (loc, shift_type,
11602 TREE_OPERAND (arg0, 0)),
11603 TREE_OPERAND (arg0, 1));
11604 tem = fold_convert_loc (loc, type, tem);
11605 }
11606 else
11607 tem = op0;
11608 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11609 if (!tree_int_cst_equal (newmaskt, arg1))
11610 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11611 }
11612 }
11613 }
11614
11615 goto associate;
11616
11617 case RDIV_EXPR:
11618 /* Don't touch a floating-point divide by zero unless the mode
11619 of the constant can represent infinity. */
11620 if (TREE_CODE (arg1) == REAL_CST
11621 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11622 && real_zerop (arg1))
11623 return NULL_TREE;
11624
11625 /* (-A) / (-B) -> A / B */
11626 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11627 return fold_build2_loc (loc, RDIV_EXPR, type,
11628 TREE_OPERAND (arg0, 0),
11629 negate_expr (arg1));
11630 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11631 return fold_build2_loc (loc, RDIV_EXPR, type,
11632 negate_expr (arg0),
11633 TREE_OPERAND (arg1, 0));
11634
11635 /* Convert A/B/C to A/(B*C). */
11636 if (flag_reciprocal_math
11637 && TREE_CODE (arg0) == RDIV_EXPR)
11638 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11639 fold_build2_loc (loc, MULT_EXPR, type,
11640 TREE_OPERAND (arg0, 1), arg1));
11641
11642 /* Convert A/(B/C) to (A/B)*C. */
11643 if (flag_reciprocal_math
11644 && TREE_CODE (arg1) == RDIV_EXPR)
11645 return fold_build2_loc (loc, MULT_EXPR, type,
11646 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11647 TREE_OPERAND (arg1, 0)),
11648 TREE_OPERAND (arg1, 1));
11649
11650 /* Convert C1/(X*C2) into (C1/C2)/X. */
11651 if (flag_reciprocal_math
11652 && TREE_CODE (arg1) == MULT_EXPR
11653 && TREE_CODE (arg0) == REAL_CST
11654 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11655 {
11656 tree tem = const_binop (RDIV_EXPR, arg0,
11657 TREE_OPERAND (arg1, 1));
11658 if (tem)
11659 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11660 TREE_OPERAND (arg1, 0));
11661 }
11662
11663 if (flag_unsafe_math_optimizations)
11664 {
11665 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11666 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11667
11668 /* Optimize sin(x)/cos(x) as tan(x). */
11669 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11670 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11671 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11672 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11673 CALL_EXPR_ARG (arg1, 0), 0))
11674 {
11675 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11676
11677 if (tanfn != NULL_TREE)
11678 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11679 }
11680
11681 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11682 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11683 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11684 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11685 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11686 CALL_EXPR_ARG (arg1, 0), 0))
11687 {
11688 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11689
11690 if (tanfn != NULL_TREE)
11691 {
11692 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11693 CALL_EXPR_ARG (arg0, 0));
11694 return fold_build2_loc (loc, RDIV_EXPR, type,
11695 build_real (type, dconst1), tmp);
11696 }
11697 }
11698
11699 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11700 NaNs or Infinities. */
11701 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11702 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11703 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11704 {
11705 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11706 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11707
11708 if (! HONOR_NANS (arg00)
11709 && ! HONOR_INFINITIES (element_mode (arg00))
11710 && operand_equal_p (arg00, arg01, 0))
11711 {
11712 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11713
11714 if (cosfn != NULL_TREE)
11715 return build_call_expr_loc (loc, cosfn, 1, arg00);
11716 }
11717 }
11718
11719 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11720 NaNs or Infinities. */
11721 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11722 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11723 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11724 {
11725 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11726 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11727
11728 if (! HONOR_NANS (arg00)
11729 && ! HONOR_INFINITIES (element_mode (arg00))
11730 && operand_equal_p (arg00, arg01, 0))
11731 {
11732 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11733
11734 if (cosfn != NULL_TREE)
11735 {
11736 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11737 return fold_build2_loc (loc, RDIV_EXPR, type,
11738 build_real (type, dconst1),
11739 tmp);
11740 }
11741 }
11742 }
11743
11744 /* Optimize pow(x,c)/x as pow(x,c-1). */
11745 if (fcode0 == BUILT_IN_POW
11746 || fcode0 == BUILT_IN_POWF
11747 || fcode0 == BUILT_IN_POWL)
11748 {
11749 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11750 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11751 if (TREE_CODE (arg01) == REAL_CST
11752 && !TREE_OVERFLOW (arg01)
11753 && operand_equal_p (arg1, arg00, 0))
11754 {
11755 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11756 REAL_VALUE_TYPE c;
11757 tree arg;
11758
11759 c = TREE_REAL_CST (arg01);
11760 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11761 arg = build_real (type, c);
11762 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11763 }
11764 }
11765
11766 /* Optimize a/root(b/c) into a*root(c/b). */
11767 if (BUILTIN_ROOT_P (fcode1))
11768 {
11769 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11770
11771 if (TREE_CODE (rootarg) == RDIV_EXPR)
11772 {
11773 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11774 tree b = TREE_OPERAND (rootarg, 0);
11775 tree c = TREE_OPERAND (rootarg, 1);
11776
11777 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11778
11779 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11780 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11781 }
11782 }
11783
11784 /* Optimize x/expN(y) into x*expN(-y). */
11785 if (BUILTIN_EXPONENT_P (fcode1))
11786 {
11787 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11788 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11789 arg1 = build_call_expr_loc (loc,
11790 expfn, 1,
11791 fold_convert_loc (loc, type, arg));
11792 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11793 }
11794
11795 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11796 if (fcode1 == BUILT_IN_POW
11797 || fcode1 == BUILT_IN_POWF
11798 || fcode1 == BUILT_IN_POWL)
11799 {
11800 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11801 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11802 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11803 tree neg11 = fold_convert_loc (loc, type,
11804 negate_expr (arg11));
11805 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11806 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11807 }
11808 }
11809 return NULL_TREE;
11810
11811 case TRUNC_DIV_EXPR:
11812 /* Optimize (X & (-A)) / A where A is a power of 2,
11813 to X >> log2(A) */
11814 if (TREE_CODE (arg0) == BIT_AND_EXPR
11815 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11816 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11817 {
11818 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11819 arg1, TREE_OPERAND (arg0, 1));
11820 if (sum && integer_zerop (sum)) {
11821 tree pow2 = build_int_cst (integer_type_node,
11822 wi::exact_log2 (arg1));
11823 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11824 TREE_OPERAND (arg0, 0), pow2);
11825 }
11826 }
11827
11828 /* Fall through */
11829
11830 case FLOOR_DIV_EXPR:
11831 /* Simplify A / (B << N) where A and B are positive and B is
11832 a power of 2, to A >> (N + log2(B)). */
11833 strict_overflow_p = false;
11834 if (TREE_CODE (arg1) == LSHIFT_EXPR
11835 && (TYPE_UNSIGNED (type)
11836 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11837 {
11838 tree sval = TREE_OPERAND (arg1, 0);
11839 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11840 {
11841 tree sh_cnt = TREE_OPERAND (arg1, 1);
11842 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11843 wi::exact_log2 (sval));
11844
11845 if (strict_overflow_p)
11846 fold_overflow_warning (("assuming signed overflow does not "
11847 "occur when simplifying A / (B << N)"),
11848 WARN_STRICT_OVERFLOW_MISC);
11849
11850 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11851 sh_cnt, pow2);
11852 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11853 fold_convert_loc (loc, type, arg0), sh_cnt);
11854 }
11855 }
11856
11857 /* Fall through */
11858
11859 case ROUND_DIV_EXPR:
11860 case CEIL_DIV_EXPR:
11861 case EXACT_DIV_EXPR:
11862 if (integer_zerop (arg1))
11863 return NULL_TREE;
11864
11865 /* Convert -A / -B to A / B when the type is signed and overflow is
11866 undefined. */
11867 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11868 && TREE_CODE (arg0) == NEGATE_EXPR
11869 && negate_expr_p (arg1))
11870 {
11871 if (INTEGRAL_TYPE_P (type))
11872 fold_overflow_warning (("assuming signed overflow does not occur "
11873 "when distributing negation across "
11874 "division"),
11875 WARN_STRICT_OVERFLOW_MISC);
11876 return fold_build2_loc (loc, code, type,
11877 fold_convert_loc (loc, type,
11878 TREE_OPERAND (arg0, 0)),
11879 fold_convert_loc (loc, type,
11880 negate_expr (arg1)));
11881 }
11882 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11883 && TREE_CODE (arg1) == NEGATE_EXPR
11884 && negate_expr_p (arg0))
11885 {
11886 if (INTEGRAL_TYPE_P (type))
11887 fold_overflow_warning (("assuming signed overflow does not occur "
11888 "when distributing negation across "
11889 "division"),
11890 WARN_STRICT_OVERFLOW_MISC);
11891 return fold_build2_loc (loc, code, type,
11892 fold_convert_loc (loc, type,
11893 negate_expr (arg0)),
11894 fold_convert_loc (loc, type,
11895 TREE_OPERAND (arg1, 0)));
11896 }
11897
11898 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11899 operation, EXACT_DIV_EXPR.
11900
11901 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11902 At one time others generated faster code, it's not clear if they do
11903 after the last round to changes to the DIV code in expmed.c. */
11904 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11905 && multiple_of_p (type, arg0, arg1))
11906 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11907
11908 strict_overflow_p = false;
11909 if (TREE_CODE (arg1) == INTEGER_CST
11910 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11911 &strict_overflow_p)))
11912 {
11913 if (strict_overflow_p)
11914 fold_overflow_warning (("assuming signed overflow does not occur "
11915 "when simplifying division"),
11916 WARN_STRICT_OVERFLOW_MISC);
11917 return fold_convert_loc (loc, type, tem);
11918 }
11919
11920 return NULL_TREE;
11921
11922 case CEIL_MOD_EXPR:
11923 case FLOOR_MOD_EXPR:
11924 case ROUND_MOD_EXPR:
11925 case TRUNC_MOD_EXPR:
11926 /* X % -Y is the same as X % Y. */
11927 if (code == TRUNC_MOD_EXPR
11928 && !TYPE_UNSIGNED (type)
11929 && TREE_CODE (arg1) == NEGATE_EXPR
11930 && !TYPE_OVERFLOW_TRAPS (type))
11931 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11932 fold_convert_loc (loc, type,
11933 TREE_OPERAND (arg1, 0)));
11934
11935 strict_overflow_p = false;
11936 if (TREE_CODE (arg1) == INTEGER_CST
11937 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11938 &strict_overflow_p)))
11939 {
11940 if (strict_overflow_p)
11941 fold_overflow_warning (("assuming signed overflow does not occur "
11942 "when simplifying modulus"),
11943 WARN_STRICT_OVERFLOW_MISC);
11944 return fold_convert_loc (loc, type, tem);
11945 }
11946
11947 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11948 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11949 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11950 && (TYPE_UNSIGNED (type)
11951 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11952 {
11953 tree c = arg1;
11954 /* Also optimize A % (C << N) where C is a power of 2,
11955 to A & ((C << N) - 1). */
11956 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11957 c = TREE_OPERAND (arg1, 0);
11958
11959 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11960 {
11961 tree mask
11962 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11963 build_int_cst (TREE_TYPE (arg1), 1));
11964 if (strict_overflow_p)
11965 fold_overflow_warning (("assuming signed overflow does not "
11966 "occur when simplifying "
11967 "X % (power of two)"),
11968 WARN_STRICT_OVERFLOW_MISC);
11969 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11970 fold_convert_loc (loc, type, arg0),
11971 fold_convert_loc (loc, type, mask));
11972 }
11973 }
11974
11975 return NULL_TREE;
11976
11977 case LROTATE_EXPR:
11978 case RROTATE_EXPR:
11979 case RSHIFT_EXPR:
11980 case LSHIFT_EXPR:
11981 /* Since negative shift count is not well-defined,
11982 don't try to compute it in the compiler. */
11983 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11984 return NULL_TREE;
11985
11986 prec = element_precision (type);
11987
11988 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11989 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11990 && tree_to_uhwi (arg1) < prec
11991 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11992 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11993 {
11994 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11995 + tree_to_uhwi (arg1));
11996
11997 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11998 being well defined. */
11999 if (low >= prec)
12000 {
12001 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12002 low = low % prec;
12003 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12004 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12005 TREE_OPERAND (arg0, 0));
12006 else
12007 low = prec - 1;
12008 }
12009
12010 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12011 build_int_cst (TREE_TYPE (arg1), low));
12012 }
12013
12014 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12015 into x & ((unsigned)-1 >> c) for unsigned types. */
12016 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12017 || (TYPE_UNSIGNED (type)
12018 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12019 && tree_fits_uhwi_p (arg1)
12020 && tree_to_uhwi (arg1) < prec
12021 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12022 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12023 {
12024 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12025 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12026 tree lshift;
12027 tree arg00;
12028
12029 if (low0 == low1)
12030 {
12031 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12032
12033 lshift = build_minus_one_cst (type);
12034 lshift = const_binop (code, lshift, arg1);
12035
12036 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12037 }
12038 }
12039
12040 /* If we have a rotate of a bit operation with the rotate count and
12041 the second operand of the bit operation both constant,
12042 permute the two operations. */
12043 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12044 && (TREE_CODE (arg0) == BIT_AND_EXPR
12045 || TREE_CODE (arg0) == BIT_IOR_EXPR
12046 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12047 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12048 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12049 fold_build2_loc (loc, code, type,
12050 TREE_OPERAND (arg0, 0), arg1),
12051 fold_build2_loc (loc, code, type,
12052 TREE_OPERAND (arg0, 1), arg1));
12053
12054 /* Two consecutive rotates adding up to the some integer
12055 multiple of the precision of the type can be ignored. */
12056 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12057 && TREE_CODE (arg0) == RROTATE_EXPR
12058 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12059 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12060 prec) == 0)
12061 return TREE_OPERAND (arg0, 0);
12062
12063 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12064 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12065 if the latter can be further optimized. */
12066 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12067 && TREE_CODE (arg0) == BIT_AND_EXPR
12068 && TREE_CODE (arg1) == INTEGER_CST
12069 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12070 {
12071 tree mask = fold_build2_loc (loc, code, type,
12072 fold_convert_loc (loc, type,
12073 TREE_OPERAND (arg0, 1)),
12074 arg1);
12075 tree shift = fold_build2_loc (loc, code, type,
12076 fold_convert_loc (loc, type,
12077 TREE_OPERAND (arg0, 0)),
12078 arg1);
12079 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12080 if (tem)
12081 return tem;
12082 }
12083
12084 return NULL_TREE;
12085
12086 case MIN_EXPR:
12087 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12088 if (tem)
12089 return tem;
12090 goto associate;
12091
12092 case MAX_EXPR:
12093 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12094 if (tem)
12095 return tem;
12096 goto associate;
12097
12098 case TRUTH_ANDIF_EXPR:
12099 /* Note that the operands of this must be ints
12100 and their values must be 0 or 1.
12101 ("true" is a fixed value perhaps depending on the language.) */
12102 /* If first arg is constant zero, return it. */
12103 if (integer_zerop (arg0))
12104 return fold_convert_loc (loc, type, arg0);
12105 case TRUTH_AND_EXPR:
12106 /* If either arg is constant true, drop it. */
12107 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12108 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12109 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12110 /* Preserve sequence points. */
12111 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12112 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12113 /* If second arg is constant zero, result is zero, but first arg
12114 must be evaluated. */
12115 if (integer_zerop (arg1))
12116 return omit_one_operand_loc (loc, type, arg1, arg0);
12117 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12118 case will be handled here. */
12119 if (integer_zerop (arg0))
12120 return omit_one_operand_loc (loc, type, arg0, arg1);
12121
12122 /* !X && X is always false. */
12123 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12124 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12125 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12126 /* X && !X is always false. */
12127 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12128 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12129 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12130
12131 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12132 means A >= Y && A != MAX, but in this case we know that
12133 A < X <= MAX. */
12134
12135 if (!TREE_SIDE_EFFECTS (arg0)
12136 && !TREE_SIDE_EFFECTS (arg1))
12137 {
12138 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12139 if (tem && !operand_equal_p (tem, arg0, 0))
12140 return fold_build2_loc (loc, code, type, tem, arg1);
12141
12142 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12143 if (tem && !operand_equal_p (tem, arg1, 0))
12144 return fold_build2_loc (loc, code, type, arg0, tem);
12145 }
12146
12147 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12148 != NULL_TREE)
12149 return tem;
12150
12151 return NULL_TREE;
12152
12153 case TRUTH_ORIF_EXPR:
12154 /* Note that the operands of this must be ints
12155 and their values must be 0 or true.
12156 ("true" is a fixed value perhaps depending on the language.) */
12157 /* If first arg is constant true, return it. */
12158 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12159 return fold_convert_loc (loc, type, arg0);
12160 case TRUTH_OR_EXPR:
12161 /* If either arg is constant zero, drop it. */
12162 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12163 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12164 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12165 /* Preserve sequence points. */
12166 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12167 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12168 /* If second arg is constant true, result is true, but we must
12169 evaluate first arg. */
12170 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12171 return omit_one_operand_loc (loc, type, arg1, arg0);
12172 /* Likewise for first arg, but note this only occurs here for
12173 TRUTH_OR_EXPR. */
12174 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12175 return omit_one_operand_loc (loc, type, arg0, arg1);
12176
12177 /* !X || X is always true. */
12178 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12179 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12180 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12181 /* X || !X is always true. */
12182 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12183 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12184 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12185
12186 /* (X && !Y) || (!X && Y) is X ^ Y */
12187 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12188 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12189 {
12190 tree a0, a1, l0, l1, n0, n1;
12191
12192 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12193 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12194
12195 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12196 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12197
12198 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12199 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12200
12201 if ((operand_equal_p (n0, a0, 0)
12202 && operand_equal_p (n1, a1, 0))
12203 || (operand_equal_p (n0, a1, 0)
12204 && operand_equal_p (n1, a0, 0)))
12205 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12206 }
12207
12208 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12209 != NULL_TREE)
12210 return tem;
12211
12212 return NULL_TREE;
12213
12214 case TRUTH_XOR_EXPR:
12215 /* If the second arg is constant zero, drop it. */
12216 if (integer_zerop (arg1))
12217 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12218 /* If the second arg is constant true, this is a logical inversion. */
12219 if (integer_onep (arg1))
12220 {
12221 tem = invert_truthvalue_loc (loc, arg0);
12222 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12223 }
12224 /* Identical arguments cancel to zero. */
12225 if (operand_equal_p (arg0, arg1, 0))
12226 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12227
12228 /* !X ^ X is always true. */
12229 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12230 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12231 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12232
12233 /* X ^ !X is always true. */
12234 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12235 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12236 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12237
12238 return NULL_TREE;
12239
12240 case EQ_EXPR:
12241 case NE_EXPR:
12242 STRIP_NOPS (arg0);
12243 STRIP_NOPS (arg1);
12244
12245 tem = fold_comparison (loc, code, type, op0, op1);
12246 if (tem != NULL_TREE)
12247 return tem;
12248
12249 /* bool_var != 0 becomes bool_var. */
12250 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12251 && code == NE_EXPR)
12252 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12253
12254 /* bool_var == 1 becomes bool_var. */
12255 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12256 && code == EQ_EXPR)
12257 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12258
12259 /* bool_var != 1 becomes !bool_var. */
12260 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12261 && code == NE_EXPR)
12262 return fold_convert_loc (loc, type,
12263 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12264 TREE_TYPE (arg0), arg0));
12265
12266 /* bool_var == 0 becomes !bool_var. */
12267 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12268 && code == EQ_EXPR)
12269 return fold_convert_loc (loc, type,
12270 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12271 TREE_TYPE (arg0), arg0));
12272
12273 /* !exp != 0 becomes !exp */
12274 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12275 && code == NE_EXPR)
12276 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12277
12278 /* If this is an equality comparison of the address of two non-weak,
12279 unaliased symbols neither of which are extern (since we do not
12280 have access to attributes for externs), then we know the result. */
12281 if (TREE_CODE (arg0) == ADDR_EXPR
12282 && DECL_P (TREE_OPERAND (arg0, 0))
12283 && TREE_CODE (arg1) == ADDR_EXPR
12284 && DECL_P (TREE_OPERAND (arg1, 0)))
12285 {
12286 int equal;
12287
12288 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
12289 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
12290 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
12291 ->equal_address_to (symtab_node::get_create
12292 (TREE_OPERAND (arg1, 0)));
12293 else
12294 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12295 if (equal != 2)
12296 return constant_boolean_node (equal
12297 ? code == EQ_EXPR : code != EQ_EXPR,
12298 type);
12299 }
12300
12301 /* Similarly for a NEGATE_EXPR. */
12302 if (TREE_CODE (arg0) == NEGATE_EXPR
12303 && TREE_CODE (arg1) == INTEGER_CST
12304 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12305 arg1)))
12306 && TREE_CODE (tem) == INTEGER_CST
12307 && !TREE_OVERFLOW (tem))
12308 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12309
12310 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12311 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12312 && TREE_CODE (arg1) == INTEGER_CST
12313 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12314 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12315 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12316 fold_convert_loc (loc,
12317 TREE_TYPE (arg0),
12318 arg1),
12319 TREE_OPERAND (arg0, 1)));
12320
12321 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12322 if ((TREE_CODE (arg0) == PLUS_EXPR
12323 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12324 || TREE_CODE (arg0) == MINUS_EXPR)
12325 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12326 0)),
12327 arg1, 0)
12328 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12329 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12330 {
12331 tree val = TREE_OPERAND (arg0, 1);
12332 return omit_two_operands_loc (loc, type,
12333 fold_build2_loc (loc, code, type,
12334 val,
12335 build_int_cst (TREE_TYPE (val),
12336 0)),
12337 TREE_OPERAND (arg0, 0), arg1);
12338 }
12339
12340 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12341 if (TREE_CODE (arg0) == MINUS_EXPR
12342 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12343 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12344 1)),
12345 arg1, 0)
12346 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12347 {
12348 return omit_two_operands_loc (loc, type,
12349 code == NE_EXPR
12350 ? boolean_true_node : boolean_false_node,
12351 TREE_OPERAND (arg0, 1), arg1);
12352 }
12353
12354 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12355 if (TREE_CODE (arg0) == ABS_EXPR
12356 && (integer_zerop (arg1) || real_zerop (arg1)))
12357 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12358
12359 /* If this is an EQ or NE comparison with zero and ARG0 is
12360 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12361 two operations, but the latter can be done in one less insn
12362 on machines that have only two-operand insns or on which a
12363 constant cannot be the first operand. */
12364 if (TREE_CODE (arg0) == BIT_AND_EXPR
12365 && integer_zerop (arg1))
12366 {
12367 tree arg00 = TREE_OPERAND (arg0, 0);
12368 tree arg01 = TREE_OPERAND (arg0, 1);
12369 if (TREE_CODE (arg00) == LSHIFT_EXPR
12370 && integer_onep (TREE_OPERAND (arg00, 0)))
12371 {
12372 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12373 arg01, TREE_OPERAND (arg00, 1));
12374 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12375 build_int_cst (TREE_TYPE (arg0), 1));
12376 return fold_build2_loc (loc, code, type,
12377 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12378 arg1);
12379 }
12380 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12381 && integer_onep (TREE_OPERAND (arg01, 0)))
12382 {
12383 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12384 arg00, TREE_OPERAND (arg01, 1));
12385 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12386 build_int_cst (TREE_TYPE (arg0), 1));
12387 return fold_build2_loc (loc, code, type,
12388 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12389 arg1);
12390 }
12391 }
12392
12393 /* If this is an NE or EQ comparison of zero against the result of a
12394 signed MOD operation whose second operand is a power of 2, make
12395 the MOD operation unsigned since it is simpler and equivalent. */
12396 if (integer_zerop (arg1)
12397 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12398 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12399 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12400 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12401 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12402 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12403 {
12404 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12405 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12406 fold_convert_loc (loc, newtype,
12407 TREE_OPERAND (arg0, 0)),
12408 fold_convert_loc (loc, newtype,
12409 TREE_OPERAND (arg0, 1)));
12410
12411 return fold_build2_loc (loc, code, type, newmod,
12412 fold_convert_loc (loc, newtype, arg1));
12413 }
12414
12415 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12416 C1 is a valid shift constant, and C2 is a power of two, i.e.
12417 a single bit. */
12418 if (TREE_CODE (arg0) == BIT_AND_EXPR
12419 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12420 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12421 == INTEGER_CST
12422 && integer_pow2p (TREE_OPERAND (arg0, 1))
12423 && integer_zerop (arg1))
12424 {
12425 tree itype = TREE_TYPE (arg0);
12426 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12427 prec = TYPE_PRECISION (itype);
12428
12429 /* Check for a valid shift count. */
12430 if (wi::ltu_p (arg001, prec))
12431 {
12432 tree arg01 = TREE_OPERAND (arg0, 1);
12433 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12434 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12435 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12436 can be rewritten as (X & (C2 << C1)) != 0. */
12437 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12438 {
12439 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12440 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12441 return fold_build2_loc (loc, code, type, tem,
12442 fold_convert_loc (loc, itype, arg1));
12443 }
12444 /* Otherwise, for signed (arithmetic) shifts,
12445 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12446 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12447 else if (!TYPE_UNSIGNED (itype))
12448 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12449 arg000, build_int_cst (itype, 0));
12450 /* Otherwise, of unsigned (logical) shifts,
12451 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12452 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12453 else
12454 return omit_one_operand_loc (loc, type,
12455 code == EQ_EXPR ? integer_one_node
12456 : integer_zero_node,
12457 arg000);
12458 }
12459 }
12460
12461 /* If we have (A & C) == C where C is a power of 2, convert this into
12462 (A & C) != 0. Similarly for NE_EXPR. */
12463 if (TREE_CODE (arg0) == BIT_AND_EXPR
12464 && integer_pow2p (TREE_OPERAND (arg0, 1))
12465 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12466 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12467 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12468 integer_zero_node));
12469
12470 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12471 bit, then fold the expression into A < 0 or A >= 0. */
12472 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12473 if (tem)
12474 return tem;
12475
12476 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12477 Similarly for NE_EXPR. */
12478 if (TREE_CODE (arg0) == BIT_AND_EXPR
12479 && TREE_CODE (arg1) == INTEGER_CST
12480 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12481 {
12482 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12483 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12484 TREE_OPERAND (arg0, 1));
12485 tree dandnotc
12486 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12487 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12488 notc);
12489 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12490 if (integer_nonzerop (dandnotc))
12491 return omit_one_operand_loc (loc, type, rslt, arg0);
12492 }
12493
12494 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12495 Similarly for NE_EXPR. */
12496 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12497 && TREE_CODE (arg1) == INTEGER_CST
12498 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12499 {
12500 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12501 tree candnotd
12502 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12503 TREE_OPERAND (arg0, 1),
12504 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12505 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12506 if (integer_nonzerop (candnotd))
12507 return omit_one_operand_loc (loc, type, rslt, arg0);
12508 }
12509
12510 /* If this is a comparison of a field, we may be able to simplify it. */
12511 if ((TREE_CODE (arg0) == COMPONENT_REF
12512 || TREE_CODE (arg0) == BIT_FIELD_REF)
12513 /* Handle the constant case even without -O
12514 to make sure the warnings are given. */
12515 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12516 {
12517 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12518 if (t1)
12519 return t1;
12520 }
12521
12522 /* Optimize comparisons of strlen vs zero to a compare of the
12523 first character of the string vs zero. To wit,
12524 strlen(ptr) == 0 => *ptr == 0
12525 strlen(ptr) != 0 => *ptr != 0
12526 Other cases should reduce to one of these two (or a constant)
12527 due to the return value of strlen being unsigned. */
12528 if (TREE_CODE (arg0) == CALL_EXPR
12529 && integer_zerop (arg1))
12530 {
12531 tree fndecl = get_callee_fndecl (arg0);
12532
12533 if (fndecl
12534 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12535 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12536 && call_expr_nargs (arg0) == 1
12537 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12538 {
12539 tree iref = build_fold_indirect_ref_loc (loc,
12540 CALL_EXPR_ARG (arg0, 0));
12541 return fold_build2_loc (loc, code, type, iref,
12542 build_int_cst (TREE_TYPE (iref), 0));
12543 }
12544 }
12545
12546 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12547 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12548 if (TREE_CODE (arg0) == RSHIFT_EXPR
12549 && integer_zerop (arg1)
12550 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12551 {
12552 tree arg00 = TREE_OPERAND (arg0, 0);
12553 tree arg01 = TREE_OPERAND (arg0, 1);
12554 tree itype = TREE_TYPE (arg00);
12555 if (wi::eq_p (arg01, element_precision (itype) - 1))
12556 {
12557 if (TYPE_UNSIGNED (itype))
12558 {
12559 itype = signed_type_for (itype);
12560 arg00 = fold_convert_loc (loc, itype, arg00);
12561 }
12562 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12563 type, arg00, build_zero_cst (itype));
12564 }
12565 }
12566
12567 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12568 if (integer_zerop (arg1)
12569 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12570 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12571 TREE_OPERAND (arg0, 1));
12572
12573 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12574 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12575 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12576 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12577 build_zero_cst (TREE_TYPE (arg0)));
12578 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12579 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12580 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12581 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12582 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12583 build_zero_cst (TREE_TYPE (arg0)));
12584
12585 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12586 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12587 && TREE_CODE (arg1) == INTEGER_CST
12588 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12589 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12590 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12591 TREE_OPERAND (arg0, 1), arg1));
12592
12593 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12594 (X & C) == 0 when C is a single bit. */
12595 if (TREE_CODE (arg0) == BIT_AND_EXPR
12596 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12597 && integer_zerop (arg1)
12598 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12599 {
12600 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12601 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12602 TREE_OPERAND (arg0, 1));
12603 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12604 type, tem,
12605 fold_convert_loc (loc, TREE_TYPE (arg0),
12606 arg1));
12607 }
12608
12609 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12610 constant C is a power of two, i.e. a single bit. */
12611 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12612 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12613 && integer_zerop (arg1)
12614 && integer_pow2p (TREE_OPERAND (arg0, 1))
12615 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12616 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12617 {
12618 tree arg00 = TREE_OPERAND (arg0, 0);
12619 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12620 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12621 }
12622
12623 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12624 when is C is a power of two, i.e. a single bit. */
12625 if (TREE_CODE (arg0) == BIT_AND_EXPR
12626 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12627 && integer_zerop (arg1)
12628 && integer_pow2p (TREE_OPERAND (arg0, 1))
12629 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12630 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12631 {
12632 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12633 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12634 arg000, TREE_OPERAND (arg0, 1));
12635 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12636 tem, build_int_cst (TREE_TYPE (tem), 0));
12637 }
12638
12639 if (integer_zerop (arg1)
12640 && tree_expr_nonzero_p (arg0))
12641 {
12642 tree res = constant_boolean_node (code==NE_EXPR, type);
12643 return omit_one_operand_loc (loc, type, res, arg0);
12644 }
12645
12646 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12647 if (TREE_CODE (arg0) == NEGATE_EXPR
12648 && TREE_CODE (arg1) == NEGATE_EXPR)
12649 return fold_build2_loc (loc, code, type,
12650 TREE_OPERAND (arg0, 0),
12651 fold_convert_loc (loc, TREE_TYPE (arg0),
12652 TREE_OPERAND (arg1, 0)));
12653
12654 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12655 if (TREE_CODE (arg0) == BIT_AND_EXPR
12656 && TREE_CODE (arg1) == BIT_AND_EXPR)
12657 {
12658 tree arg00 = TREE_OPERAND (arg0, 0);
12659 tree arg01 = TREE_OPERAND (arg0, 1);
12660 tree arg10 = TREE_OPERAND (arg1, 0);
12661 tree arg11 = TREE_OPERAND (arg1, 1);
12662 tree itype = TREE_TYPE (arg0);
12663
12664 if (operand_equal_p (arg01, arg11, 0))
12665 return fold_build2_loc (loc, code, type,
12666 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12667 fold_build2_loc (loc,
12668 BIT_XOR_EXPR, itype,
12669 arg00, arg10),
12670 arg01),
12671 build_zero_cst (itype));
12672
12673 if (operand_equal_p (arg01, arg10, 0))
12674 return fold_build2_loc (loc, code, type,
12675 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12676 fold_build2_loc (loc,
12677 BIT_XOR_EXPR, itype,
12678 arg00, arg11),
12679 arg01),
12680 build_zero_cst (itype));
12681
12682 if (operand_equal_p (arg00, arg11, 0))
12683 return fold_build2_loc (loc, code, type,
12684 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12685 fold_build2_loc (loc,
12686 BIT_XOR_EXPR, itype,
12687 arg01, arg10),
12688 arg00),
12689 build_zero_cst (itype));
12690
12691 if (operand_equal_p (arg00, arg10, 0))
12692 return fold_build2_loc (loc, code, type,
12693 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12694 fold_build2_loc (loc,
12695 BIT_XOR_EXPR, itype,
12696 arg01, arg11),
12697 arg00),
12698 build_zero_cst (itype));
12699 }
12700
12701 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12702 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12703 {
12704 tree arg00 = TREE_OPERAND (arg0, 0);
12705 tree arg01 = TREE_OPERAND (arg0, 1);
12706 tree arg10 = TREE_OPERAND (arg1, 0);
12707 tree arg11 = TREE_OPERAND (arg1, 1);
12708 tree itype = TREE_TYPE (arg0);
12709
12710 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12711 operand_equal_p guarantees no side-effects so we don't need
12712 to use omit_one_operand on Z. */
12713 if (operand_equal_p (arg01, arg11, 0))
12714 return fold_build2_loc (loc, code, type, arg00,
12715 fold_convert_loc (loc, TREE_TYPE (arg00),
12716 arg10));
12717 if (operand_equal_p (arg01, arg10, 0))
12718 return fold_build2_loc (loc, code, type, arg00,
12719 fold_convert_loc (loc, TREE_TYPE (arg00),
12720 arg11));
12721 if (operand_equal_p (arg00, arg11, 0))
12722 return fold_build2_loc (loc, code, type, arg01,
12723 fold_convert_loc (loc, TREE_TYPE (arg01),
12724 arg10));
12725 if (operand_equal_p (arg00, arg10, 0))
12726 return fold_build2_loc (loc, code, type, arg01,
12727 fold_convert_loc (loc, TREE_TYPE (arg01),
12728 arg11));
12729
12730 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12731 if (TREE_CODE (arg01) == INTEGER_CST
12732 && TREE_CODE (arg11) == INTEGER_CST)
12733 {
12734 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12735 fold_convert_loc (loc, itype, arg11));
12736 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12737 return fold_build2_loc (loc, code, type, tem,
12738 fold_convert_loc (loc, itype, arg10));
12739 }
12740 }
12741
12742 /* Attempt to simplify equality/inequality comparisons of complex
12743 values. Only lower the comparison if the result is known or
12744 can be simplified to a single scalar comparison. */
12745 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12746 || TREE_CODE (arg0) == COMPLEX_CST)
12747 && (TREE_CODE (arg1) == COMPLEX_EXPR
12748 || TREE_CODE (arg1) == COMPLEX_CST))
12749 {
12750 tree real0, imag0, real1, imag1;
12751 tree rcond, icond;
12752
12753 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12754 {
12755 real0 = TREE_OPERAND (arg0, 0);
12756 imag0 = TREE_OPERAND (arg0, 1);
12757 }
12758 else
12759 {
12760 real0 = TREE_REALPART (arg0);
12761 imag0 = TREE_IMAGPART (arg0);
12762 }
12763
12764 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12765 {
12766 real1 = TREE_OPERAND (arg1, 0);
12767 imag1 = TREE_OPERAND (arg1, 1);
12768 }
12769 else
12770 {
12771 real1 = TREE_REALPART (arg1);
12772 imag1 = TREE_IMAGPART (arg1);
12773 }
12774
12775 rcond = fold_binary_loc (loc, code, type, real0, real1);
12776 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12777 {
12778 if (integer_zerop (rcond))
12779 {
12780 if (code == EQ_EXPR)
12781 return omit_two_operands_loc (loc, type, boolean_false_node,
12782 imag0, imag1);
12783 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12784 }
12785 else
12786 {
12787 if (code == NE_EXPR)
12788 return omit_two_operands_loc (loc, type, boolean_true_node,
12789 imag0, imag1);
12790 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12791 }
12792 }
12793
12794 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12795 if (icond && TREE_CODE (icond) == INTEGER_CST)
12796 {
12797 if (integer_zerop (icond))
12798 {
12799 if (code == EQ_EXPR)
12800 return omit_two_operands_loc (loc, type, boolean_false_node,
12801 real0, real1);
12802 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12803 }
12804 else
12805 {
12806 if (code == NE_EXPR)
12807 return omit_two_operands_loc (loc, type, boolean_true_node,
12808 real0, real1);
12809 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12810 }
12811 }
12812 }
12813
12814 return NULL_TREE;
12815
12816 case LT_EXPR:
12817 case GT_EXPR:
12818 case LE_EXPR:
12819 case GE_EXPR:
12820 tem = fold_comparison (loc, code, type, op0, op1);
12821 if (tem != NULL_TREE)
12822 return tem;
12823
12824 /* Transform comparisons of the form X +- C CMP X. */
12825 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12826 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12827 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12828 && !HONOR_SNANS (arg0))
12829 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12830 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12831 {
12832 tree arg01 = TREE_OPERAND (arg0, 1);
12833 enum tree_code code0 = TREE_CODE (arg0);
12834 int is_positive;
12835
12836 if (TREE_CODE (arg01) == REAL_CST)
12837 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12838 else
12839 is_positive = tree_int_cst_sgn (arg01);
12840
12841 /* (X - c) > X becomes false. */
12842 if (code == GT_EXPR
12843 && ((code0 == MINUS_EXPR && is_positive >= 0)
12844 || (code0 == PLUS_EXPR && is_positive <= 0)))
12845 {
12846 if (TREE_CODE (arg01) == INTEGER_CST
12847 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12848 fold_overflow_warning (("assuming signed overflow does not "
12849 "occur when assuming that (X - c) > X "
12850 "is always false"),
12851 WARN_STRICT_OVERFLOW_ALL);
12852 return constant_boolean_node (0, type);
12853 }
12854
12855 /* Likewise (X + c) < X becomes false. */
12856 if (code == LT_EXPR
12857 && ((code0 == PLUS_EXPR && is_positive >= 0)
12858 || (code0 == MINUS_EXPR && is_positive <= 0)))
12859 {
12860 if (TREE_CODE (arg01) == INTEGER_CST
12861 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12862 fold_overflow_warning (("assuming signed overflow does not "
12863 "occur when assuming that "
12864 "(X + c) < X is always false"),
12865 WARN_STRICT_OVERFLOW_ALL);
12866 return constant_boolean_node (0, type);
12867 }
12868
12869 /* Convert (X - c) <= X to true. */
12870 if (!HONOR_NANS (arg1)
12871 && code == LE_EXPR
12872 && ((code0 == MINUS_EXPR && is_positive >= 0)
12873 || (code0 == PLUS_EXPR && is_positive <= 0)))
12874 {
12875 if (TREE_CODE (arg01) == INTEGER_CST
12876 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12877 fold_overflow_warning (("assuming signed overflow does not "
12878 "occur when assuming that "
12879 "(X - c) <= X is always true"),
12880 WARN_STRICT_OVERFLOW_ALL);
12881 return constant_boolean_node (1, type);
12882 }
12883
12884 /* Convert (X + c) >= X to true. */
12885 if (!HONOR_NANS (arg1)
12886 && code == GE_EXPR
12887 && ((code0 == PLUS_EXPR && is_positive >= 0)
12888 || (code0 == MINUS_EXPR && is_positive <= 0)))
12889 {
12890 if (TREE_CODE (arg01) == INTEGER_CST
12891 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12892 fold_overflow_warning (("assuming signed overflow does not "
12893 "occur when assuming that "
12894 "(X + c) >= X is always true"),
12895 WARN_STRICT_OVERFLOW_ALL);
12896 return constant_boolean_node (1, type);
12897 }
12898
12899 if (TREE_CODE (arg01) == INTEGER_CST)
12900 {
12901 /* Convert X + c > X and X - c < X to true for integers. */
12902 if (code == GT_EXPR
12903 && ((code0 == PLUS_EXPR && is_positive > 0)
12904 || (code0 == MINUS_EXPR && is_positive < 0)))
12905 {
12906 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12907 fold_overflow_warning (("assuming signed overflow does "
12908 "not occur when assuming that "
12909 "(X + c) > X is always true"),
12910 WARN_STRICT_OVERFLOW_ALL);
12911 return constant_boolean_node (1, type);
12912 }
12913
12914 if (code == LT_EXPR
12915 && ((code0 == MINUS_EXPR && is_positive > 0)
12916 || (code0 == PLUS_EXPR && is_positive < 0)))
12917 {
12918 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12919 fold_overflow_warning (("assuming signed overflow does "
12920 "not occur when assuming that "
12921 "(X - c) < X is always true"),
12922 WARN_STRICT_OVERFLOW_ALL);
12923 return constant_boolean_node (1, type);
12924 }
12925
12926 /* Convert X + c <= X and X - c >= X to false for integers. */
12927 if (code == LE_EXPR
12928 && ((code0 == PLUS_EXPR && is_positive > 0)
12929 || (code0 == MINUS_EXPR && is_positive < 0)))
12930 {
12931 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12932 fold_overflow_warning (("assuming signed overflow does "
12933 "not occur when assuming that "
12934 "(X + c) <= X is always false"),
12935 WARN_STRICT_OVERFLOW_ALL);
12936 return constant_boolean_node (0, type);
12937 }
12938
12939 if (code == GE_EXPR
12940 && ((code0 == MINUS_EXPR && is_positive > 0)
12941 || (code0 == PLUS_EXPR && is_positive < 0)))
12942 {
12943 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12944 fold_overflow_warning (("assuming signed overflow does "
12945 "not occur when assuming that "
12946 "(X - c) >= X is always false"),
12947 WARN_STRICT_OVERFLOW_ALL);
12948 return constant_boolean_node (0, type);
12949 }
12950 }
12951 }
12952
12953 /* Comparisons with the highest or lowest possible integer of
12954 the specified precision will have known values. */
12955 {
12956 tree arg1_type = TREE_TYPE (arg1);
12957 unsigned int prec = TYPE_PRECISION (arg1_type);
12958
12959 if (TREE_CODE (arg1) == INTEGER_CST
12960 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12961 {
12962 wide_int max = wi::max_value (arg1_type);
12963 wide_int signed_max = wi::max_value (prec, SIGNED);
12964 wide_int min = wi::min_value (arg1_type);
12965
12966 if (wi::eq_p (arg1, max))
12967 switch (code)
12968 {
12969 case GT_EXPR:
12970 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12971
12972 case GE_EXPR:
12973 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12974
12975 case LE_EXPR:
12976 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12977
12978 case LT_EXPR:
12979 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12980
12981 /* The GE_EXPR and LT_EXPR cases above are not normally
12982 reached because of previous transformations. */
12983
12984 default:
12985 break;
12986 }
12987 else if (wi::eq_p (arg1, max - 1))
12988 switch (code)
12989 {
12990 case GT_EXPR:
12991 arg1 = const_binop (PLUS_EXPR, arg1,
12992 build_int_cst (TREE_TYPE (arg1), 1));
12993 return fold_build2_loc (loc, EQ_EXPR, type,
12994 fold_convert_loc (loc,
12995 TREE_TYPE (arg1), arg0),
12996 arg1);
12997 case LE_EXPR:
12998 arg1 = const_binop (PLUS_EXPR, arg1,
12999 build_int_cst (TREE_TYPE (arg1), 1));
13000 return fold_build2_loc (loc, NE_EXPR, type,
13001 fold_convert_loc (loc, TREE_TYPE (arg1),
13002 arg0),
13003 arg1);
13004 default:
13005 break;
13006 }
13007 else if (wi::eq_p (arg1, min))
13008 switch (code)
13009 {
13010 case LT_EXPR:
13011 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13012
13013 case LE_EXPR:
13014 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13015
13016 case GE_EXPR:
13017 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13018
13019 case GT_EXPR:
13020 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13021
13022 default:
13023 break;
13024 }
13025 else if (wi::eq_p (arg1, min + 1))
13026 switch (code)
13027 {
13028 case GE_EXPR:
13029 arg1 = const_binop (MINUS_EXPR, arg1,
13030 build_int_cst (TREE_TYPE (arg1), 1));
13031 return fold_build2_loc (loc, NE_EXPR, type,
13032 fold_convert_loc (loc,
13033 TREE_TYPE (arg1), arg0),
13034 arg1);
13035 case LT_EXPR:
13036 arg1 = const_binop (MINUS_EXPR, arg1,
13037 build_int_cst (TREE_TYPE (arg1), 1));
13038 return fold_build2_loc (loc, EQ_EXPR, type,
13039 fold_convert_loc (loc, TREE_TYPE (arg1),
13040 arg0),
13041 arg1);
13042 default:
13043 break;
13044 }
13045
13046 else if (wi::eq_p (arg1, signed_max)
13047 && TYPE_UNSIGNED (arg1_type)
13048 /* We will flip the signedness of the comparison operator
13049 associated with the mode of arg1, so the sign bit is
13050 specified by this mode. Check that arg1 is the signed
13051 max associated with this sign bit. */
13052 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13053 /* signed_type does not work on pointer types. */
13054 && INTEGRAL_TYPE_P (arg1_type))
13055 {
13056 /* The following case also applies to X < signed_max+1
13057 and X >= signed_max+1 because previous transformations. */
13058 if (code == LE_EXPR || code == GT_EXPR)
13059 {
13060 tree st = signed_type_for (arg1_type);
13061 return fold_build2_loc (loc,
13062 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13063 type, fold_convert_loc (loc, st, arg0),
13064 build_int_cst (st, 0));
13065 }
13066 }
13067 }
13068 }
13069
13070 /* If we are comparing an ABS_EXPR with a constant, we can
13071 convert all the cases into explicit comparisons, but they may
13072 well not be faster than doing the ABS and one comparison.
13073 But ABS (X) <= C is a range comparison, which becomes a subtraction
13074 and a comparison, and is probably faster. */
13075 if (code == LE_EXPR
13076 && TREE_CODE (arg1) == INTEGER_CST
13077 && TREE_CODE (arg0) == ABS_EXPR
13078 && ! TREE_SIDE_EFFECTS (arg0)
13079 && (0 != (tem = negate_expr (arg1)))
13080 && TREE_CODE (tem) == INTEGER_CST
13081 && !TREE_OVERFLOW (tem))
13082 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13083 build2 (GE_EXPR, type,
13084 TREE_OPERAND (arg0, 0), tem),
13085 build2 (LE_EXPR, type,
13086 TREE_OPERAND (arg0, 0), arg1));
13087
13088 /* Convert ABS_EXPR<x> >= 0 to true. */
13089 strict_overflow_p = false;
13090 if (code == GE_EXPR
13091 && (integer_zerop (arg1)
13092 || (! HONOR_NANS (arg0)
13093 && real_zerop (arg1)))
13094 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13095 {
13096 if (strict_overflow_p)
13097 fold_overflow_warning (("assuming signed overflow does not occur "
13098 "when simplifying comparison of "
13099 "absolute value and zero"),
13100 WARN_STRICT_OVERFLOW_CONDITIONAL);
13101 return omit_one_operand_loc (loc, type,
13102 constant_boolean_node (true, type),
13103 arg0);
13104 }
13105
13106 /* Convert ABS_EXPR<x> < 0 to false. */
13107 strict_overflow_p = false;
13108 if (code == LT_EXPR
13109 && (integer_zerop (arg1) || real_zerop (arg1))
13110 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13111 {
13112 if (strict_overflow_p)
13113 fold_overflow_warning (("assuming signed overflow does not occur "
13114 "when simplifying comparison of "
13115 "absolute value and zero"),
13116 WARN_STRICT_OVERFLOW_CONDITIONAL);
13117 return omit_one_operand_loc (loc, type,
13118 constant_boolean_node (false, type),
13119 arg0);
13120 }
13121
13122 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13123 and similarly for >= into !=. */
13124 if ((code == LT_EXPR || code == GE_EXPR)
13125 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13126 && TREE_CODE (arg1) == LSHIFT_EXPR
13127 && integer_onep (TREE_OPERAND (arg1, 0)))
13128 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13129 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13130 TREE_OPERAND (arg1, 1)),
13131 build_zero_cst (TREE_TYPE (arg0)));
13132
13133 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13134 otherwise Y might be >= # of bits in X's type and thus e.g.
13135 (unsigned char) (1 << Y) for Y 15 might be 0.
13136 If the cast is widening, then 1 << Y should have unsigned type,
13137 otherwise if Y is number of bits in the signed shift type minus 1,
13138 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13139 31 might be 0xffffffff80000000. */
13140 if ((code == LT_EXPR || code == GE_EXPR)
13141 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13142 && CONVERT_EXPR_P (arg1)
13143 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13144 && (element_precision (TREE_TYPE (arg1))
13145 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13146 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13147 || (element_precision (TREE_TYPE (arg1))
13148 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13149 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13150 {
13151 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13152 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13153 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13154 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13155 build_zero_cst (TREE_TYPE (arg0)));
13156 }
13157
13158 return NULL_TREE;
13159
13160 case UNORDERED_EXPR:
13161 case ORDERED_EXPR:
13162 case UNLT_EXPR:
13163 case UNLE_EXPR:
13164 case UNGT_EXPR:
13165 case UNGE_EXPR:
13166 case UNEQ_EXPR:
13167 case LTGT_EXPR:
13168 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13169 {
13170 t1 = fold_relational_const (code, type, arg0, arg1);
13171 if (t1 != NULL_TREE)
13172 return t1;
13173 }
13174
13175 /* If the first operand is NaN, the result is constant. */
13176 if (TREE_CODE (arg0) == REAL_CST
13177 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13178 && (code != LTGT_EXPR || ! flag_trapping_math))
13179 {
13180 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13181 ? integer_zero_node
13182 : integer_one_node;
13183 return omit_one_operand_loc (loc, type, t1, arg1);
13184 }
13185
13186 /* If the second operand is NaN, the result is constant. */
13187 if (TREE_CODE (arg1) == REAL_CST
13188 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13189 && (code != LTGT_EXPR || ! flag_trapping_math))
13190 {
13191 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13192 ? integer_zero_node
13193 : integer_one_node;
13194 return omit_one_operand_loc (loc, type, t1, arg0);
13195 }
13196
13197 /* Simplify unordered comparison of something with itself. */
13198 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13199 && operand_equal_p (arg0, arg1, 0))
13200 return constant_boolean_node (1, type);
13201
13202 if (code == LTGT_EXPR
13203 && !flag_trapping_math
13204 && operand_equal_p (arg0, arg1, 0))
13205 return constant_boolean_node (0, type);
13206
13207 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13208 {
13209 tree targ0 = strip_float_extensions (arg0);
13210 tree targ1 = strip_float_extensions (arg1);
13211 tree newtype = TREE_TYPE (targ0);
13212
13213 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13214 newtype = TREE_TYPE (targ1);
13215
13216 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13217 return fold_build2_loc (loc, code, type,
13218 fold_convert_loc (loc, newtype, targ0),
13219 fold_convert_loc (loc, newtype, targ1));
13220 }
13221
13222 return NULL_TREE;
13223
13224 case COMPOUND_EXPR:
13225 /* When pedantic, a compound expression can be neither an lvalue
13226 nor an integer constant expression. */
13227 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13228 return NULL_TREE;
13229 /* Don't let (0, 0) be null pointer constant. */
13230 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13231 : fold_convert_loc (loc, type, arg1);
13232 return pedantic_non_lvalue_loc (loc, tem);
13233
13234 case ASSERT_EXPR:
13235 /* An ASSERT_EXPR should never be passed to fold_binary. */
13236 gcc_unreachable ();
13237
13238 default:
13239 return NULL_TREE;
13240 } /* switch (code) */
13241 }
13242
13243 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13244 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13245 of GOTO_EXPR. */
13246
13247 static tree
13248 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13249 {
13250 switch (TREE_CODE (*tp))
13251 {
13252 case LABEL_EXPR:
13253 return *tp;
13254
13255 case GOTO_EXPR:
13256 *walk_subtrees = 0;
13257
13258 /* ... fall through ... */
13259
13260 default:
13261 return NULL_TREE;
13262 }
13263 }
13264
13265 /* Return whether the sub-tree ST contains a label which is accessible from
13266 outside the sub-tree. */
13267
13268 static bool
13269 contains_label_p (tree st)
13270 {
13271 return
13272 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13273 }
13274
13275 /* Fold a ternary expression of code CODE and type TYPE with operands
13276 OP0, OP1, and OP2. Return the folded expression if folding is
13277 successful. Otherwise, return NULL_TREE. */
13278
13279 tree
13280 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13281 tree op0, tree op1, tree op2)
13282 {
13283 tree tem;
13284 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13285 enum tree_code_class kind = TREE_CODE_CLASS (code);
13286
13287 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13288 && TREE_CODE_LENGTH (code) == 3);
13289
13290 /* If this is a commutative operation, and OP0 is a constant, move it
13291 to OP1 to reduce the number of tests below. */
13292 if (commutative_ternary_tree_code (code)
13293 && tree_swap_operands_p (op0, op1, true))
13294 return fold_build3_loc (loc, code, type, op1, op0, op2);
13295
13296 tem = generic_simplify (loc, code, type, op0, op1, op2);
13297 if (tem)
13298 return tem;
13299
13300 /* Strip any conversions that don't change the mode. This is safe
13301 for every expression, except for a comparison expression because
13302 its signedness is derived from its operands. So, in the latter
13303 case, only strip conversions that don't change the signedness.
13304
13305 Note that this is done as an internal manipulation within the
13306 constant folder, in order to find the simplest representation of
13307 the arguments so that their form can be studied. In any cases,
13308 the appropriate type conversions should be put back in the tree
13309 that will get out of the constant folder. */
13310 if (op0)
13311 {
13312 arg0 = op0;
13313 STRIP_NOPS (arg0);
13314 }
13315
13316 if (op1)
13317 {
13318 arg1 = op1;
13319 STRIP_NOPS (arg1);
13320 }
13321
13322 if (op2)
13323 {
13324 arg2 = op2;
13325 STRIP_NOPS (arg2);
13326 }
13327
13328 switch (code)
13329 {
13330 case COMPONENT_REF:
13331 if (TREE_CODE (arg0) == CONSTRUCTOR
13332 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13333 {
13334 unsigned HOST_WIDE_INT idx;
13335 tree field, value;
13336 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13337 if (field == arg1)
13338 return value;
13339 }
13340 return NULL_TREE;
13341
13342 case COND_EXPR:
13343 case VEC_COND_EXPR:
13344 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13345 so all simple results must be passed through pedantic_non_lvalue. */
13346 if (TREE_CODE (arg0) == INTEGER_CST)
13347 {
13348 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13349 tem = integer_zerop (arg0) ? op2 : op1;
13350 /* Only optimize constant conditions when the selected branch
13351 has the same type as the COND_EXPR. This avoids optimizing
13352 away "c ? x : throw", where the throw has a void type.
13353 Avoid throwing away that operand which contains label. */
13354 if ((!TREE_SIDE_EFFECTS (unused_op)
13355 || !contains_label_p (unused_op))
13356 && (! VOID_TYPE_P (TREE_TYPE (tem))
13357 || VOID_TYPE_P (type)))
13358 return pedantic_non_lvalue_loc (loc, tem);
13359 return NULL_TREE;
13360 }
13361 else if (TREE_CODE (arg0) == VECTOR_CST)
13362 {
13363 if ((TREE_CODE (arg1) == VECTOR_CST
13364 || TREE_CODE (arg1) == CONSTRUCTOR)
13365 && (TREE_CODE (arg2) == VECTOR_CST
13366 || TREE_CODE (arg2) == CONSTRUCTOR))
13367 {
13368 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13369 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13370 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13371 for (i = 0; i < nelts; i++)
13372 {
13373 tree val = VECTOR_CST_ELT (arg0, i);
13374 if (integer_all_onesp (val))
13375 sel[i] = i;
13376 else if (integer_zerop (val))
13377 sel[i] = nelts + i;
13378 else /* Currently unreachable. */
13379 return NULL_TREE;
13380 }
13381 tree t = fold_vec_perm (type, arg1, arg2, sel);
13382 if (t != NULL_TREE)
13383 return t;
13384 }
13385 }
13386
13387 /* If we have A op B ? A : C, we may be able to convert this to a
13388 simpler expression, depending on the operation and the values
13389 of B and C. Signed zeros prevent all of these transformations,
13390 for reasons given above each one.
13391
13392 Also try swapping the arguments and inverting the conditional. */
13393 if (COMPARISON_CLASS_P (arg0)
13394 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13395 arg1, TREE_OPERAND (arg0, 1))
13396 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13397 {
13398 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13399 if (tem)
13400 return tem;
13401 }
13402
13403 if (COMPARISON_CLASS_P (arg0)
13404 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13405 op2,
13406 TREE_OPERAND (arg0, 1))
13407 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13408 {
13409 location_t loc0 = expr_location_or (arg0, loc);
13410 tem = fold_invert_truthvalue (loc0, arg0);
13411 if (tem && COMPARISON_CLASS_P (tem))
13412 {
13413 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13414 if (tem)
13415 return tem;
13416 }
13417 }
13418
13419 /* If the second operand is simpler than the third, swap them
13420 since that produces better jump optimization results. */
13421 if (truth_value_p (TREE_CODE (arg0))
13422 && tree_swap_operands_p (op1, op2, false))
13423 {
13424 location_t loc0 = expr_location_or (arg0, loc);
13425 /* See if this can be inverted. If it can't, possibly because
13426 it was a floating-point inequality comparison, don't do
13427 anything. */
13428 tem = fold_invert_truthvalue (loc0, arg0);
13429 if (tem)
13430 return fold_build3_loc (loc, code, type, tem, op2, op1);
13431 }
13432
13433 /* Convert A ? 1 : 0 to simply A. */
13434 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13435 : (integer_onep (op1)
13436 && !VECTOR_TYPE_P (type)))
13437 && integer_zerop (op2)
13438 /* If we try to convert OP0 to our type, the
13439 call to fold will try to move the conversion inside
13440 a COND, which will recurse. In that case, the COND_EXPR
13441 is probably the best choice, so leave it alone. */
13442 && type == TREE_TYPE (arg0))
13443 return pedantic_non_lvalue_loc (loc, arg0);
13444
13445 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13446 over COND_EXPR in cases such as floating point comparisons. */
13447 if (integer_zerop (op1)
13448 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13449 : (integer_onep (op2)
13450 && !VECTOR_TYPE_P (type)))
13451 && truth_value_p (TREE_CODE (arg0)))
13452 return pedantic_non_lvalue_loc (loc,
13453 fold_convert_loc (loc, type,
13454 invert_truthvalue_loc (loc,
13455 arg0)));
13456
13457 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13458 if (TREE_CODE (arg0) == LT_EXPR
13459 && integer_zerop (TREE_OPERAND (arg0, 1))
13460 && integer_zerop (op2)
13461 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13462 {
13463 /* sign_bit_p looks through both zero and sign extensions,
13464 but for this optimization only sign extensions are
13465 usable. */
13466 tree tem2 = TREE_OPERAND (arg0, 0);
13467 while (tem != tem2)
13468 {
13469 if (TREE_CODE (tem2) != NOP_EXPR
13470 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13471 {
13472 tem = NULL_TREE;
13473 break;
13474 }
13475 tem2 = TREE_OPERAND (tem2, 0);
13476 }
13477 /* sign_bit_p only checks ARG1 bits within A's precision.
13478 If <sign bit of A> has wider type than A, bits outside
13479 of A's precision in <sign bit of A> need to be checked.
13480 If they are all 0, this optimization needs to be done
13481 in unsigned A's type, if they are all 1 in signed A's type,
13482 otherwise this can't be done. */
13483 if (tem
13484 && TYPE_PRECISION (TREE_TYPE (tem))
13485 < TYPE_PRECISION (TREE_TYPE (arg1))
13486 && TYPE_PRECISION (TREE_TYPE (tem))
13487 < TYPE_PRECISION (type))
13488 {
13489 int inner_width, outer_width;
13490 tree tem_type;
13491
13492 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13493 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13494 if (outer_width > TYPE_PRECISION (type))
13495 outer_width = TYPE_PRECISION (type);
13496
13497 wide_int mask = wi::shifted_mask
13498 (inner_width, outer_width - inner_width, false,
13499 TYPE_PRECISION (TREE_TYPE (arg1)));
13500
13501 wide_int common = mask & arg1;
13502 if (common == mask)
13503 {
13504 tem_type = signed_type_for (TREE_TYPE (tem));
13505 tem = fold_convert_loc (loc, tem_type, tem);
13506 }
13507 else if (common == 0)
13508 {
13509 tem_type = unsigned_type_for (TREE_TYPE (tem));
13510 tem = fold_convert_loc (loc, tem_type, tem);
13511 }
13512 else
13513 tem = NULL;
13514 }
13515
13516 if (tem)
13517 return
13518 fold_convert_loc (loc, type,
13519 fold_build2_loc (loc, BIT_AND_EXPR,
13520 TREE_TYPE (tem), tem,
13521 fold_convert_loc (loc,
13522 TREE_TYPE (tem),
13523 arg1)));
13524 }
13525
13526 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13527 already handled above. */
13528 if (TREE_CODE (arg0) == BIT_AND_EXPR
13529 && integer_onep (TREE_OPERAND (arg0, 1))
13530 && integer_zerop (op2)
13531 && integer_pow2p (arg1))
13532 {
13533 tree tem = TREE_OPERAND (arg0, 0);
13534 STRIP_NOPS (tem);
13535 if (TREE_CODE (tem) == RSHIFT_EXPR
13536 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13537 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13538 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13539 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13540 TREE_OPERAND (tem, 0), arg1);
13541 }
13542
13543 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13544 is probably obsolete because the first operand should be a
13545 truth value (that's why we have the two cases above), but let's
13546 leave it in until we can confirm this for all front-ends. */
13547 if (integer_zerop (op2)
13548 && TREE_CODE (arg0) == NE_EXPR
13549 && integer_zerop (TREE_OPERAND (arg0, 1))
13550 && integer_pow2p (arg1)
13551 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13552 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13553 arg1, OEP_ONLY_CONST))
13554 return pedantic_non_lvalue_loc (loc,
13555 fold_convert_loc (loc, type,
13556 TREE_OPERAND (arg0, 0)));
13557
13558 /* Disable the transformations below for vectors, since
13559 fold_binary_op_with_conditional_arg may undo them immediately,
13560 yielding an infinite loop. */
13561 if (code == VEC_COND_EXPR)
13562 return NULL_TREE;
13563
13564 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13565 if (integer_zerop (op2)
13566 && truth_value_p (TREE_CODE (arg0))
13567 && truth_value_p (TREE_CODE (arg1))
13568 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13569 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13570 : TRUTH_ANDIF_EXPR,
13571 type, fold_convert_loc (loc, type, arg0), arg1);
13572
13573 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13574 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13575 && truth_value_p (TREE_CODE (arg0))
13576 && truth_value_p (TREE_CODE (arg1))
13577 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13578 {
13579 location_t loc0 = expr_location_or (arg0, loc);
13580 /* Only perform transformation if ARG0 is easily inverted. */
13581 tem = fold_invert_truthvalue (loc0, arg0);
13582 if (tem)
13583 return fold_build2_loc (loc, code == VEC_COND_EXPR
13584 ? BIT_IOR_EXPR
13585 : TRUTH_ORIF_EXPR,
13586 type, fold_convert_loc (loc, type, tem),
13587 arg1);
13588 }
13589
13590 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13591 if (integer_zerop (arg1)
13592 && truth_value_p (TREE_CODE (arg0))
13593 && truth_value_p (TREE_CODE (op2))
13594 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13595 {
13596 location_t loc0 = expr_location_or (arg0, loc);
13597 /* Only perform transformation if ARG0 is easily inverted. */
13598 tem = fold_invert_truthvalue (loc0, arg0);
13599 if (tem)
13600 return fold_build2_loc (loc, code == VEC_COND_EXPR
13601 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13602 type, fold_convert_loc (loc, type, tem),
13603 op2);
13604 }
13605
13606 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13607 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13608 && truth_value_p (TREE_CODE (arg0))
13609 && truth_value_p (TREE_CODE (op2))
13610 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13611 return fold_build2_loc (loc, code == VEC_COND_EXPR
13612 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13613 type, fold_convert_loc (loc, type, arg0), op2);
13614
13615 return NULL_TREE;
13616
13617 case CALL_EXPR:
13618 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13619 of fold_ternary on them. */
13620 gcc_unreachable ();
13621
13622 case BIT_FIELD_REF:
13623 if ((TREE_CODE (arg0) == VECTOR_CST
13624 || (TREE_CODE (arg0) == CONSTRUCTOR
13625 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13626 && (type == TREE_TYPE (TREE_TYPE (arg0))
13627 || (TREE_CODE (type) == VECTOR_TYPE
13628 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13629 {
13630 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13631 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13632 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13633 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13634
13635 if (n != 0
13636 && (idx % width) == 0
13637 && (n % width) == 0
13638 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13639 {
13640 idx = idx / width;
13641 n = n / width;
13642
13643 if (TREE_CODE (arg0) == VECTOR_CST)
13644 {
13645 if (n == 1)
13646 return VECTOR_CST_ELT (arg0, idx);
13647
13648 tree *vals = XALLOCAVEC (tree, n);
13649 for (unsigned i = 0; i < n; ++i)
13650 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13651 return build_vector (type, vals);
13652 }
13653
13654 /* Constructor elements can be subvectors. */
13655 unsigned HOST_WIDE_INT k = 1;
13656 if (CONSTRUCTOR_NELTS (arg0) != 0)
13657 {
13658 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13659 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13660 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13661 }
13662
13663 /* We keep an exact subset of the constructor elements. */
13664 if ((idx % k) == 0 && (n % k) == 0)
13665 {
13666 if (CONSTRUCTOR_NELTS (arg0) == 0)
13667 return build_constructor (type, NULL);
13668 idx /= k;
13669 n /= k;
13670 if (n == 1)
13671 {
13672 if (idx < CONSTRUCTOR_NELTS (arg0))
13673 return CONSTRUCTOR_ELT (arg0, idx)->value;
13674 return build_zero_cst (type);
13675 }
13676
13677 vec<constructor_elt, va_gc> *vals;
13678 vec_alloc (vals, n);
13679 for (unsigned i = 0;
13680 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13681 ++i)
13682 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13683 CONSTRUCTOR_ELT
13684 (arg0, idx + i)->value);
13685 return build_constructor (type, vals);
13686 }
13687 /* The bitfield references a single constructor element. */
13688 else if (idx + n <= (idx / k + 1) * k)
13689 {
13690 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13691 return build_zero_cst (type);
13692 else if (n == k)
13693 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13694 else
13695 return fold_build3_loc (loc, code, type,
13696 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13697 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13698 }
13699 }
13700 }
13701
13702 /* A bit-field-ref that referenced the full argument can be stripped. */
13703 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13704 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13705 && integer_zerop (op2))
13706 return fold_convert_loc (loc, type, arg0);
13707
13708 /* On constants we can use native encode/interpret to constant
13709 fold (nearly) all BIT_FIELD_REFs. */
13710 if (CONSTANT_CLASS_P (arg0)
13711 && can_native_interpret_type_p (type)
13712 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13713 /* This limitation should not be necessary, we just need to
13714 round this up to mode size. */
13715 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13716 /* Need bit-shifting of the buffer to relax the following. */
13717 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13718 {
13719 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13720 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13721 unsigned HOST_WIDE_INT clen;
13722 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13723 /* ??? We cannot tell native_encode_expr to start at
13724 some random byte only. So limit us to a reasonable amount
13725 of work. */
13726 if (clen <= 4096)
13727 {
13728 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13729 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13730 if (len > 0
13731 && len * BITS_PER_UNIT >= bitpos + bitsize)
13732 {
13733 tree v = native_interpret_expr (type,
13734 b + bitpos / BITS_PER_UNIT,
13735 bitsize / BITS_PER_UNIT);
13736 if (v)
13737 return v;
13738 }
13739 }
13740 }
13741
13742 return NULL_TREE;
13743
13744 case FMA_EXPR:
13745 /* For integers we can decompose the FMA if possible. */
13746 if (TREE_CODE (arg0) == INTEGER_CST
13747 && TREE_CODE (arg1) == INTEGER_CST)
13748 return fold_build2_loc (loc, PLUS_EXPR, type,
13749 const_binop (MULT_EXPR, arg0, arg1), arg2);
13750 if (integer_zerop (arg2))
13751 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13752
13753 return fold_fma (loc, type, arg0, arg1, arg2);
13754
13755 case VEC_PERM_EXPR:
13756 if (TREE_CODE (arg2) == VECTOR_CST)
13757 {
13758 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13759 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13760 unsigned char *sel2 = sel + nelts;
13761 bool need_mask_canon = false;
13762 bool need_mask_canon2 = false;
13763 bool all_in_vec0 = true;
13764 bool all_in_vec1 = true;
13765 bool maybe_identity = true;
13766 bool single_arg = (op0 == op1);
13767 bool changed = false;
13768
13769 mask2 = 2 * nelts - 1;
13770 mask = single_arg ? (nelts - 1) : mask2;
13771 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13772 for (i = 0; i < nelts; i++)
13773 {
13774 tree val = VECTOR_CST_ELT (arg2, i);
13775 if (TREE_CODE (val) != INTEGER_CST)
13776 return NULL_TREE;
13777
13778 /* Make sure that the perm value is in an acceptable
13779 range. */
13780 wide_int t = val;
13781 need_mask_canon |= wi::gtu_p (t, mask);
13782 need_mask_canon2 |= wi::gtu_p (t, mask2);
13783 sel[i] = t.to_uhwi () & mask;
13784 sel2[i] = t.to_uhwi () & mask2;
13785
13786 if (sel[i] < nelts)
13787 all_in_vec1 = false;
13788 else
13789 all_in_vec0 = false;
13790
13791 if ((sel[i] & (nelts-1)) != i)
13792 maybe_identity = false;
13793 }
13794
13795 if (maybe_identity)
13796 {
13797 if (all_in_vec0)
13798 return op0;
13799 if (all_in_vec1)
13800 return op1;
13801 }
13802
13803 if (all_in_vec0)
13804 op1 = op0;
13805 else if (all_in_vec1)
13806 {
13807 op0 = op1;
13808 for (i = 0; i < nelts; i++)
13809 sel[i] -= nelts;
13810 need_mask_canon = true;
13811 }
13812
13813 if ((TREE_CODE (op0) == VECTOR_CST
13814 || TREE_CODE (op0) == CONSTRUCTOR)
13815 && (TREE_CODE (op1) == VECTOR_CST
13816 || TREE_CODE (op1) == CONSTRUCTOR))
13817 {
13818 tree t = fold_vec_perm (type, op0, op1, sel);
13819 if (t != NULL_TREE)
13820 return t;
13821 }
13822
13823 if (op0 == op1 && !single_arg)
13824 changed = true;
13825
13826 /* Some targets are deficient and fail to expand a single
13827 argument permutation while still allowing an equivalent
13828 2-argument version. */
13829 if (need_mask_canon && arg2 == op2
13830 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13831 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13832 {
13833 need_mask_canon = need_mask_canon2;
13834 sel = sel2;
13835 }
13836
13837 if (need_mask_canon && arg2 == op2)
13838 {
13839 tree *tsel = XALLOCAVEC (tree, nelts);
13840 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13841 for (i = 0; i < nelts; i++)
13842 tsel[i] = build_int_cst (eltype, sel[i]);
13843 op2 = build_vector (TREE_TYPE (arg2), tsel);
13844 changed = true;
13845 }
13846
13847 if (changed)
13848 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13849 }
13850 return NULL_TREE;
13851
13852 default:
13853 return NULL_TREE;
13854 } /* switch (code) */
13855 }
13856
13857 /* Perform constant folding and related simplification of EXPR.
13858 The related simplifications include x*1 => x, x*0 => 0, etc.,
13859 and application of the associative law.
13860 NOP_EXPR conversions may be removed freely (as long as we
13861 are careful not to change the type of the overall expression).
13862 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13863 but we can constant-fold them if they have constant operands. */
13864
13865 #ifdef ENABLE_FOLD_CHECKING
13866 # define fold(x) fold_1 (x)
13867 static tree fold_1 (tree);
13868 static
13869 #endif
13870 tree
13871 fold (tree expr)
13872 {
13873 const tree t = expr;
13874 enum tree_code code = TREE_CODE (t);
13875 enum tree_code_class kind = TREE_CODE_CLASS (code);
13876 tree tem;
13877 location_t loc = EXPR_LOCATION (expr);
13878
13879 /* Return right away if a constant. */
13880 if (kind == tcc_constant)
13881 return t;
13882
13883 /* CALL_EXPR-like objects with variable numbers of operands are
13884 treated specially. */
13885 if (kind == tcc_vl_exp)
13886 {
13887 if (code == CALL_EXPR)
13888 {
13889 tem = fold_call_expr (loc, expr, false);
13890 return tem ? tem : expr;
13891 }
13892 return expr;
13893 }
13894
13895 if (IS_EXPR_CODE_CLASS (kind))
13896 {
13897 tree type = TREE_TYPE (t);
13898 tree op0, op1, op2;
13899
13900 switch (TREE_CODE_LENGTH (code))
13901 {
13902 case 1:
13903 op0 = TREE_OPERAND (t, 0);
13904 tem = fold_unary_loc (loc, code, type, op0);
13905 return tem ? tem : expr;
13906 case 2:
13907 op0 = TREE_OPERAND (t, 0);
13908 op1 = TREE_OPERAND (t, 1);
13909 tem = fold_binary_loc (loc, code, type, op0, op1);
13910 return tem ? tem : expr;
13911 case 3:
13912 op0 = TREE_OPERAND (t, 0);
13913 op1 = TREE_OPERAND (t, 1);
13914 op2 = TREE_OPERAND (t, 2);
13915 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13916 return tem ? tem : expr;
13917 default:
13918 break;
13919 }
13920 }
13921
13922 switch (code)
13923 {
13924 case ARRAY_REF:
13925 {
13926 tree op0 = TREE_OPERAND (t, 0);
13927 tree op1 = TREE_OPERAND (t, 1);
13928
13929 if (TREE_CODE (op1) == INTEGER_CST
13930 && TREE_CODE (op0) == CONSTRUCTOR
13931 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13932 {
13933 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13934 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13935 unsigned HOST_WIDE_INT begin = 0;
13936
13937 /* Find a matching index by means of a binary search. */
13938 while (begin != end)
13939 {
13940 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13941 tree index = (*elts)[middle].index;
13942
13943 if (TREE_CODE (index) == INTEGER_CST
13944 && tree_int_cst_lt (index, op1))
13945 begin = middle + 1;
13946 else if (TREE_CODE (index) == INTEGER_CST
13947 && tree_int_cst_lt (op1, index))
13948 end = middle;
13949 else if (TREE_CODE (index) == RANGE_EXPR
13950 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13951 begin = middle + 1;
13952 else if (TREE_CODE (index) == RANGE_EXPR
13953 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13954 end = middle;
13955 else
13956 return (*elts)[middle].value;
13957 }
13958 }
13959
13960 return t;
13961 }
13962
13963 /* Return a VECTOR_CST if possible. */
13964 case CONSTRUCTOR:
13965 {
13966 tree type = TREE_TYPE (t);
13967 if (TREE_CODE (type) != VECTOR_TYPE)
13968 return t;
13969
13970 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13971 unsigned HOST_WIDE_INT idx, pos = 0;
13972 tree value;
13973
13974 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13975 {
13976 if (!CONSTANT_CLASS_P (value))
13977 return t;
13978 if (TREE_CODE (value) == VECTOR_CST)
13979 {
13980 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13981 vec[pos++] = VECTOR_CST_ELT (value, i);
13982 }
13983 else
13984 vec[pos++] = value;
13985 }
13986 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13987 vec[pos] = build_zero_cst (TREE_TYPE (type));
13988
13989 return build_vector (type, vec);
13990 }
13991
13992 case CONST_DECL:
13993 return fold (DECL_INITIAL (t));
13994
13995 default:
13996 return t;
13997 } /* switch (code) */
13998 }
13999
14000 #ifdef ENABLE_FOLD_CHECKING
14001 #undef fold
14002
14003 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14004 hash_table<pointer_hash<const tree_node> > *);
14005 static void fold_check_failed (const_tree, const_tree);
14006 void print_fold_checksum (const_tree);
14007
14008 /* When --enable-checking=fold, compute a digest of expr before
14009 and after actual fold call to see if fold did not accidentally
14010 change original expr. */
14011
14012 tree
14013 fold (tree expr)
14014 {
14015 tree ret;
14016 struct md5_ctx ctx;
14017 unsigned char checksum_before[16], checksum_after[16];
14018 hash_table<pointer_hash<const tree_node> > ht (32);
14019
14020 md5_init_ctx (&ctx);
14021 fold_checksum_tree (expr, &ctx, &ht);
14022 md5_finish_ctx (&ctx, checksum_before);
14023 ht.empty ();
14024
14025 ret = fold_1 (expr);
14026
14027 md5_init_ctx (&ctx);
14028 fold_checksum_tree (expr, &ctx, &ht);
14029 md5_finish_ctx (&ctx, checksum_after);
14030
14031 if (memcmp (checksum_before, checksum_after, 16))
14032 fold_check_failed (expr, ret);
14033
14034 return ret;
14035 }
14036
14037 void
14038 print_fold_checksum (const_tree expr)
14039 {
14040 struct md5_ctx ctx;
14041 unsigned char checksum[16], cnt;
14042 hash_table<pointer_hash<const tree_node> > ht (32);
14043
14044 md5_init_ctx (&ctx);
14045 fold_checksum_tree (expr, &ctx, &ht);
14046 md5_finish_ctx (&ctx, checksum);
14047 for (cnt = 0; cnt < 16; ++cnt)
14048 fprintf (stderr, "%02x", checksum[cnt]);
14049 putc ('\n', stderr);
14050 }
14051
14052 static void
14053 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14054 {
14055 internal_error ("fold check: original tree changed by fold");
14056 }
14057
14058 static void
14059 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14060 hash_table<pointer_hash <const tree_node> > *ht)
14061 {
14062 const tree_node **slot;
14063 enum tree_code code;
14064 union tree_node buf;
14065 int i, len;
14066
14067 recursive_label:
14068 if (expr == NULL)
14069 return;
14070 slot = ht->find_slot (expr, INSERT);
14071 if (*slot != NULL)
14072 return;
14073 *slot = expr;
14074 code = TREE_CODE (expr);
14075 if (TREE_CODE_CLASS (code) == tcc_declaration
14076 && DECL_ASSEMBLER_NAME_SET_P (expr))
14077 {
14078 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14079 memcpy ((char *) &buf, expr, tree_size (expr));
14080 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14081 expr = (tree) &buf;
14082 }
14083 else if (TREE_CODE_CLASS (code) == tcc_type
14084 && (TYPE_POINTER_TO (expr)
14085 || TYPE_REFERENCE_TO (expr)
14086 || TYPE_CACHED_VALUES_P (expr)
14087 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14088 || TYPE_NEXT_VARIANT (expr)))
14089 {
14090 /* Allow these fields to be modified. */
14091 tree tmp;
14092 memcpy ((char *) &buf, expr, tree_size (expr));
14093 expr = tmp = (tree) &buf;
14094 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14095 TYPE_POINTER_TO (tmp) = NULL;
14096 TYPE_REFERENCE_TO (tmp) = NULL;
14097 TYPE_NEXT_VARIANT (tmp) = NULL;
14098 if (TYPE_CACHED_VALUES_P (tmp))
14099 {
14100 TYPE_CACHED_VALUES_P (tmp) = 0;
14101 TYPE_CACHED_VALUES (tmp) = NULL;
14102 }
14103 }
14104 md5_process_bytes (expr, tree_size (expr), ctx);
14105 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14106 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14107 if (TREE_CODE_CLASS (code) != tcc_type
14108 && TREE_CODE_CLASS (code) != tcc_declaration
14109 && code != TREE_LIST
14110 && code != SSA_NAME
14111 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14112 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14113 switch (TREE_CODE_CLASS (code))
14114 {
14115 case tcc_constant:
14116 switch (code)
14117 {
14118 case STRING_CST:
14119 md5_process_bytes (TREE_STRING_POINTER (expr),
14120 TREE_STRING_LENGTH (expr), ctx);
14121 break;
14122 case COMPLEX_CST:
14123 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14124 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14125 break;
14126 case VECTOR_CST:
14127 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14128 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14129 break;
14130 default:
14131 break;
14132 }
14133 break;
14134 case tcc_exceptional:
14135 switch (code)
14136 {
14137 case TREE_LIST:
14138 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14139 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14140 expr = TREE_CHAIN (expr);
14141 goto recursive_label;
14142 break;
14143 case TREE_VEC:
14144 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14145 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14146 break;
14147 default:
14148 break;
14149 }
14150 break;
14151 case tcc_expression:
14152 case tcc_reference:
14153 case tcc_comparison:
14154 case tcc_unary:
14155 case tcc_binary:
14156 case tcc_statement:
14157 case tcc_vl_exp:
14158 len = TREE_OPERAND_LENGTH (expr);
14159 for (i = 0; i < len; ++i)
14160 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14161 break;
14162 case tcc_declaration:
14163 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14164 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14165 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14166 {
14167 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14168 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14169 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14170 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14171 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14172 }
14173
14174 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14175 {
14176 if (TREE_CODE (expr) == FUNCTION_DECL)
14177 {
14178 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14179 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14180 }
14181 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14182 }
14183 break;
14184 case tcc_type:
14185 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14186 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14187 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14188 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14189 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14190 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14191 if (INTEGRAL_TYPE_P (expr)
14192 || SCALAR_FLOAT_TYPE_P (expr))
14193 {
14194 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14195 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14196 }
14197 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14198 if (TREE_CODE (expr) == RECORD_TYPE
14199 || TREE_CODE (expr) == UNION_TYPE
14200 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14201 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14202 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14203 break;
14204 default:
14205 break;
14206 }
14207 }
14208
14209 /* Helper function for outputting the checksum of a tree T. When
14210 debugging with gdb, you can "define mynext" to be "next" followed
14211 by "call debug_fold_checksum (op0)", then just trace down till the
14212 outputs differ. */
14213
14214 DEBUG_FUNCTION void
14215 debug_fold_checksum (const_tree t)
14216 {
14217 int i;
14218 unsigned char checksum[16];
14219 struct md5_ctx ctx;
14220 hash_table<pointer_hash<const tree_node> > ht (32);
14221
14222 md5_init_ctx (&ctx);
14223 fold_checksum_tree (t, &ctx, &ht);
14224 md5_finish_ctx (&ctx, checksum);
14225 ht.empty ();
14226
14227 for (i = 0; i < 16; i++)
14228 fprintf (stderr, "%d ", checksum[i]);
14229
14230 fprintf (stderr, "\n");
14231 }
14232
14233 #endif
14234
14235 /* Fold a unary tree expression with code CODE of type TYPE with an
14236 operand OP0. LOC is the location of the resulting expression.
14237 Return a folded expression if successful. Otherwise, return a tree
14238 expression with code CODE of type TYPE with an operand OP0. */
14239
14240 tree
14241 fold_build1_stat_loc (location_t loc,
14242 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14243 {
14244 tree tem;
14245 #ifdef ENABLE_FOLD_CHECKING
14246 unsigned char checksum_before[16], checksum_after[16];
14247 struct md5_ctx ctx;
14248 hash_table<pointer_hash<const tree_node> > ht (32);
14249
14250 md5_init_ctx (&ctx);
14251 fold_checksum_tree (op0, &ctx, &ht);
14252 md5_finish_ctx (&ctx, checksum_before);
14253 ht.empty ();
14254 #endif
14255
14256 tem = fold_unary_loc (loc, code, type, op0);
14257 if (!tem)
14258 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14259
14260 #ifdef ENABLE_FOLD_CHECKING
14261 md5_init_ctx (&ctx);
14262 fold_checksum_tree (op0, &ctx, &ht);
14263 md5_finish_ctx (&ctx, checksum_after);
14264
14265 if (memcmp (checksum_before, checksum_after, 16))
14266 fold_check_failed (op0, tem);
14267 #endif
14268 return tem;
14269 }
14270
14271 /* Fold a binary tree expression with code CODE of type TYPE with
14272 operands OP0 and OP1. LOC is the location of the resulting
14273 expression. Return a folded expression if successful. Otherwise,
14274 return a tree expression with code CODE of type TYPE with operands
14275 OP0 and OP1. */
14276
14277 tree
14278 fold_build2_stat_loc (location_t loc,
14279 enum tree_code code, tree type, tree op0, tree op1
14280 MEM_STAT_DECL)
14281 {
14282 tree tem;
14283 #ifdef ENABLE_FOLD_CHECKING
14284 unsigned char checksum_before_op0[16],
14285 checksum_before_op1[16],
14286 checksum_after_op0[16],
14287 checksum_after_op1[16];
14288 struct md5_ctx ctx;
14289 hash_table<pointer_hash<const tree_node> > ht (32);
14290
14291 md5_init_ctx (&ctx);
14292 fold_checksum_tree (op0, &ctx, &ht);
14293 md5_finish_ctx (&ctx, checksum_before_op0);
14294 ht.empty ();
14295
14296 md5_init_ctx (&ctx);
14297 fold_checksum_tree (op1, &ctx, &ht);
14298 md5_finish_ctx (&ctx, checksum_before_op1);
14299 ht.empty ();
14300 #endif
14301
14302 tem = fold_binary_loc (loc, code, type, op0, op1);
14303 if (!tem)
14304 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14305
14306 #ifdef ENABLE_FOLD_CHECKING
14307 md5_init_ctx (&ctx);
14308 fold_checksum_tree (op0, &ctx, &ht);
14309 md5_finish_ctx (&ctx, checksum_after_op0);
14310 ht.empty ();
14311
14312 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14313 fold_check_failed (op0, tem);
14314
14315 md5_init_ctx (&ctx);
14316 fold_checksum_tree (op1, &ctx, &ht);
14317 md5_finish_ctx (&ctx, checksum_after_op1);
14318
14319 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14320 fold_check_failed (op1, tem);
14321 #endif
14322 return tem;
14323 }
14324
14325 /* Fold a ternary tree expression with code CODE of type TYPE with
14326 operands OP0, OP1, and OP2. Return a folded expression if
14327 successful. Otherwise, return a tree expression with code CODE of
14328 type TYPE with operands OP0, OP1, and OP2. */
14329
14330 tree
14331 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14332 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14333 {
14334 tree tem;
14335 #ifdef ENABLE_FOLD_CHECKING
14336 unsigned char checksum_before_op0[16],
14337 checksum_before_op1[16],
14338 checksum_before_op2[16],
14339 checksum_after_op0[16],
14340 checksum_after_op1[16],
14341 checksum_after_op2[16];
14342 struct md5_ctx ctx;
14343 hash_table<pointer_hash<const tree_node> > ht (32);
14344
14345 md5_init_ctx (&ctx);
14346 fold_checksum_tree (op0, &ctx, &ht);
14347 md5_finish_ctx (&ctx, checksum_before_op0);
14348 ht.empty ();
14349
14350 md5_init_ctx (&ctx);
14351 fold_checksum_tree (op1, &ctx, &ht);
14352 md5_finish_ctx (&ctx, checksum_before_op1);
14353 ht.empty ();
14354
14355 md5_init_ctx (&ctx);
14356 fold_checksum_tree (op2, &ctx, &ht);
14357 md5_finish_ctx (&ctx, checksum_before_op2);
14358 ht.empty ();
14359 #endif
14360
14361 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14362 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14363 if (!tem)
14364 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14365
14366 #ifdef ENABLE_FOLD_CHECKING
14367 md5_init_ctx (&ctx);
14368 fold_checksum_tree (op0, &ctx, &ht);
14369 md5_finish_ctx (&ctx, checksum_after_op0);
14370 ht.empty ();
14371
14372 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14373 fold_check_failed (op0, tem);
14374
14375 md5_init_ctx (&ctx);
14376 fold_checksum_tree (op1, &ctx, &ht);
14377 md5_finish_ctx (&ctx, checksum_after_op1);
14378 ht.empty ();
14379
14380 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14381 fold_check_failed (op1, tem);
14382
14383 md5_init_ctx (&ctx);
14384 fold_checksum_tree (op2, &ctx, &ht);
14385 md5_finish_ctx (&ctx, checksum_after_op2);
14386
14387 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14388 fold_check_failed (op2, tem);
14389 #endif
14390 return tem;
14391 }
14392
14393 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14394 arguments in ARGARRAY, and a null static chain.
14395 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14396 of type TYPE from the given operands as constructed by build_call_array. */
14397
14398 tree
14399 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14400 int nargs, tree *argarray)
14401 {
14402 tree tem;
14403 #ifdef ENABLE_FOLD_CHECKING
14404 unsigned char checksum_before_fn[16],
14405 checksum_before_arglist[16],
14406 checksum_after_fn[16],
14407 checksum_after_arglist[16];
14408 struct md5_ctx ctx;
14409 hash_table<pointer_hash<const tree_node> > ht (32);
14410 int i;
14411
14412 md5_init_ctx (&ctx);
14413 fold_checksum_tree (fn, &ctx, &ht);
14414 md5_finish_ctx (&ctx, checksum_before_fn);
14415 ht.empty ();
14416
14417 md5_init_ctx (&ctx);
14418 for (i = 0; i < nargs; i++)
14419 fold_checksum_tree (argarray[i], &ctx, &ht);
14420 md5_finish_ctx (&ctx, checksum_before_arglist);
14421 ht.empty ();
14422 #endif
14423
14424 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14425 if (!tem)
14426 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14427
14428 #ifdef ENABLE_FOLD_CHECKING
14429 md5_init_ctx (&ctx);
14430 fold_checksum_tree (fn, &ctx, &ht);
14431 md5_finish_ctx (&ctx, checksum_after_fn);
14432 ht.empty ();
14433
14434 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14435 fold_check_failed (fn, tem);
14436
14437 md5_init_ctx (&ctx);
14438 for (i = 0; i < nargs; i++)
14439 fold_checksum_tree (argarray[i], &ctx, &ht);
14440 md5_finish_ctx (&ctx, checksum_after_arglist);
14441
14442 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14443 fold_check_failed (NULL_TREE, tem);
14444 #endif
14445 return tem;
14446 }
14447
14448 /* Perform constant folding and related simplification of initializer
14449 expression EXPR. These behave identically to "fold_buildN" but ignore
14450 potential run-time traps and exceptions that fold must preserve. */
14451
14452 #define START_FOLD_INIT \
14453 int saved_signaling_nans = flag_signaling_nans;\
14454 int saved_trapping_math = flag_trapping_math;\
14455 int saved_rounding_math = flag_rounding_math;\
14456 int saved_trapv = flag_trapv;\
14457 int saved_folding_initializer = folding_initializer;\
14458 flag_signaling_nans = 0;\
14459 flag_trapping_math = 0;\
14460 flag_rounding_math = 0;\
14461 flag_trapv = 0;\
14462 folding_initializer = 1;
14463
14464 #define END_FOLD_INIT \
14465 flag_signaling_nans = saved_signaling_nans;\
14466 flag_trapping_math = saved_trapping_math;\
14467 flag_rounding_math = saved_rounding_math;\
14468 flag_trapv = saved_trapv;\
14469 folding_initializer = saved_folding_initializer;
14470
14471 tree
14472 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14473 tree type, tree op)
14474 {
14475 tree result;
14476 START_FOLD_INIT;
14477
14478 result = fold_build1_loc (loc, code, type, op);
14479
14480 END_FOLD_INIT;
14481 return result;
14482 }
14483
14484 tree
14485 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14486 tree type, tree op0, tree op1)
14487 {
14488 tree result;
14489 START_FOLD_INIT;
14490
14491 result = fold_build2_loc (loc, code, type, op0, op1);
14492
14493 END_FOLD_INIT;
14494 return result;
14495 }
14496
14497 tree
14498 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14499 int nargs, tree *argarray)
14500 {
14501 tree result;
14502 START_FOLD_INIT;
14503
14504 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14505
14506 END_FOLD_INIT;
14507 return result;
14508 }
14509
14510 #undef START_FOLD_INIT
14511 #undef END_FOLD_INIT
14512
14513 /* Determine if first argument is a multiple of second argument. Return 0 if
14514 it is not, or we cannot easily determined it to be.
14515
14516 An example of the sort of thing we care about (at this point; this routine
14517 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14518 fold cases do now) is discovering that
14519
14520 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14521
14522 is a multiple of
14523
14524 SAVE_EXPR (J * 8)
14525
14526 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14527
14528 This code also handles discovering that
14529
14530 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14531
14532 is a multiple of 8 so we don't have to worry about dealing with a
14533 possible remainder.
14534
14535 Note that we *look* inside a SAVE_EXPR only to determine how it was
14536 calculated; it is not safe for fold to do much of anything else with the
14537 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14538 at run time. For example, the latter example above *cannot* be implemented
14539 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14540 evaluation time of the original SAVE_EXPR is not necessarily the same at
14541 the time the new expression is evaluated. The only optimization of this
14542 sort that would be valid is changing
14543
14544 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14545
14546 divided by 8 to
14547
14548 SAVE_EXPR (I) * SAVE_EXPR (J)
14549
14550 (where the same SAVE_EXPR (J) is used in the original and the
14551 transformed version). */
14552
14553 int
14554 multiple_of_p (tree type, const_tree top, const_tree bottom)
14555 {
14556 if (operand_equal_p (top, bottom, 0))
14557 return 1;
14558
14559 if (TREE_CODE (type) != INTEGER_TYPE)
14560 return 0;
14561
14562 switch (TREE_CODE (top))
14563 {
14564 case BIT_AND_EXPR:
14565 /* Bitwise and provides a power of two multiple. If the mask is
14566 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14567 if (!integer_pow2p (bottom))
14568 return 0;
14569 /* FALLTHRU */
14570
14571 case MULT_EXPR:
14572 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14573 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14574
14575 case PLUS_EXPR:
14576 case MINUS_EXPR:
14577 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14578 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14579
14580 case LSHIFT_EXPR:
14581 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14582 {
14583 tree op1, t1;
14584
14585 op1 = TREE_OPERAND (top, 1);
14586 /* const_binop may not detect overflow correctly,
14587 so check for it explicitly here. */
14588 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14589 && 0 != (t1 = fold_convert (type,
14590 const_binop (LSHIFT_EXPR,
14591 size_one_node,
14592 op1)))
14593 && !TREE_OVERFLOW (t1))
14594 return multiple_of_p (type, t1, bottom);
14595 }
14596 return 0;
14597
14598 case NOP_EXPR:
14599 /* Can't handle conversions from non-integral or wider integral type. */
14600 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14601 || (TYPE_PRECISION (type)
14602 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14603 return 0;
14604
14605 /* .. fall through ... */
14606
14607 case SAVE_EXPR:
14608 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14609
14610 case COND_EXPR:
14611 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14612 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14613
14614 case INTEGER_CST:
14615 if (TREE_CODE (bottom) != INTEGER_CST
14616 || integer_zerop (bottom)
14617 || (TYPE_UNSIGNED (type)
14618 && (tree_int_cst_sgn (top) < 0
14619 || tree_int_cst_sgn (bottom) < 0)))
14620 return 0;
14621 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14622 SIGNED);
14623
14624 default:
14625 return 0;
14626 }
14627 }
14628
14629 /* Return true if CODE or TYPE is known to be non-negative. */
14630
14631 static bool
14632 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14633 {
14634 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14635 && truth_value_p (code))
14636 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14637 have a signed:1 type (where the value is -1 and 0). */
14638 return true;
14639 return false;
14640 }
14641
14642 /* Return true if (CODE OP0) is known to be non-negative. If the return
14643 value is based on the assumption that signed overflow is undefined,
14644 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14645 *STRICT_OVERFLOW_P. */
14646
14647 bool
14648 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14649 bool *strict_overflow_p)
14650 {
14651 if (TYPE_UNSIGNED (type))
14652 return true;
14653
14654 switch (code)
14655 {
14656 case ABS_EXPR:
14657 /* We can't return 1 if flag_wrapv is set because
14658 ABS_EXPR<INT_MIN> = INT_MIN. */
14659 if (!INTEGRAL_TYPE_P (type))
14660 return true;
14661 if (TYPE_OVERFLOW_UNDEFINED (type))
14662 {
14663 *strict_overflow_p = true;
14664 return true;
14665 }
14666 break;
14667
14668 case NON_LVALUE_EXPR:
14669 case FLOAT_EXPR:
14670 case FIX_TRUNC_EXPR:
14671 return tree_expr_nonnegative_warnv_p (op0,
14672 strict_overflow_p);
14673
14674 CASE_CONVERT:
14675 {
14676 tree inner_type = TREE_TYPE (op0);
14677 tree outer_type = type;
14678
14679 if (TREE_CODE (outer_type) == REAL_TYPE)
14680 {
14681 if (TREE_CODE (inner_type) == REAL_TYPE)
14682 return tree_expr_nonnegative_warnv_p (op0,
14683 strict_overflow_p);
14684 if (INTEGRAL_TYPE_P (inner_type))
14685 {
14686 if (TYPE_UNSIGNED (inner_type))
14687 return true;
14688 return tree_expr_nonnegative_warnv_p (op0,
14689 strict_overflow_p);
14690 }
14691 }
14692 else if (INTEGRAL_TYPE_P (outer_type))
14693 {
14694 if (TREE_CODE (inner_type) == REAL_TYPE)
14695 return tree_expr_nonnegative_warnv_p (op0,
14696 strict_overflow_p);
14697 if (INTEGRAL_TYPE_P (inner_type))
14698 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14699 && TYPE_UNSIGNED (inner_type);
14700 }
14701 }
14702 break;
14703
14704 default:
14705 return tree_simple_nonnegative_warnv_p (code, type);
14706 }
14707
14708 /* We don't know sign of `t', so be conservative and return false. */
14709 return false;
14710 }
14711
14712 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14713 value is based on the assumption that signed overflow is undefined,
14714 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14715 *STRICT_OVERFLOW_P. */
14716
14717 bool
14718 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14719 tree op1, bool *strict_overflow_p)
14720 {
14721 if (TYPE_UNSIGNED (type))
14722 return true;
14723
14724 switch (code)
14725 {
14726 case POINTER_PLUS_EXPR:
14727 case PLUS_EXPR:
14728 if (FLOAT_TYPE_P (type))
14729 return (tree_expr_nonnegative_warnv_p (op0,
14730 strict_overflow_p)
14731 && tree_expr_nonnegative_warnv_p (op1,
14732 strict_overflow_p));
14733
14734 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14735 both unsigned and at least 2 bits shorter than the result. */
14736 if (TREE_CODE (type) == INTEGER_TYPE
14737 && TREE_CODE (op0) == NOP_EXPR
14738 && TREE_CODE (op1) == NOP_EXPR)
14739 {
14740 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14741 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14742 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14743 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14744 {
14745 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14746 TYPE_PRECISION (inner2)) + 1;
14747 return prec < TYPE_PRECISION (type);
14748 }
14749 }
14750 break;
14751
14752 case MULT_EXPR:
14753 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14754 {
14755 /* x * x is always non-negative for floating point x
14756 or without overflow. */
14757 if (operand_equal_p (op0, op1, 0)
14758 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14759 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14760 {
14761 if (ANY_INTEGRAL_TYPE_P (type)
14762 && TYPE_OVERFLOW_UNDEFINED (type))
14763 *strict_overflow_p = true;
14764 return true;
14765 }
14766 }
14767
14768 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14769 both unsigned and their total bits is shorter than the result. */
14770 if (TREE_CODE (type) == INTEGER_TYPE
14771 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14772 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14773 {
14774 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14775 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14776 : TREE_TYPE (op0);
14777 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14778 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14779 : TREE_TYPE (op1);
14780
14781 bool unsigned0 = TYPE_UNSIGNED (inner0);
14782 bool unsigned1 = TYPE_UNSIGNED (inner1);
14783
14784 if (TREE_CODE (op0) == INTEGER_CST)
14785 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14786
14787 if (TREE_CODE (op1) == INTEGER_CST)
14788 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14789
14790 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14791 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14792 {
14793 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14794 ? tree_int_cst_min_precision (op0, UNSIGNED)
14795 : TYPE_PRECISION (inner0);
14796
14797 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14798 ? tree_int_cst_min_precision (op1, UNSIGNED)
14799 : TYPE_PRECISION (inner1);
14800
14801 return precision0 + precision1 < TYPE_PRECISION (type);
14802 }
14803 }
14804 return false;
14805
14806 case BIT_AND_EXPR:
14807 case MAX_EXPR:
14808 return (tree_expr_nonnegative_warnv_p (op0,
14809 strict_overflow_p)
14810 || tree_expr_nonnegative_warnv_p (op1,
14811 strict_overflow_p));
14812
14813 case BIT_IOR_EXPR:
14814 case BIT_XOR_EXPR:
14815 case MIN_EXPR:
14816 case RDIV_EXPR:
14817 case TRUNC_DIV_EXPR:
14818 case CEIL_DIV_EXPR:
14819 case FLOOR_DIV_EXPR:
14820 case ROUND_DIV_EXPR:
14821 return (tree_expr_nonnegative_warnv_p (op0,
14822 strict_overflow_p)
14823 && tree_expr_nonnegative_warnv_p (op1,
14824 strict_overflow_p));
14825
14826 case TRUNC_MOD_EXPR:
14827 case CEIL_MOD_EXPR:
14828 case FLOOR_MOD_EXPR:
14829 case ROUND_MOD_EXPR:
14830 return tree_expr_nonnegative_warnv_p (op0,
14831 strict_overflow_p);
14832 default:
14833 return tree_simple_nonnegative_warnv_p (code, type);
14834 }
14835
14836 /* We don't know sign of `t', so be conservative and return false. */
14837 return false;
14838 }
14839
14840 /* Return true if T is known to be non-negative. If the return
14841 value is based on the assumption that signed overflow is undefined,
14842 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14843 *STRICT_OVERFLOW_P. */
14844
14845 bool
14846 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14847 {
14848 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14849 return true;
14850
14851 switch (TREE_CODE (t))
14852 {
14853 case INTEGER_CST:
14854 return tree_int_cst_sgn (t) >= 0;
14855
14856 case REAL_CST:
14857 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14858
14859 case FIXED_CST:
14860 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14861
14862 case COND_EXPR:
14863 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14864 strict_overflow_p)
14865 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14866 strict_overflow_p));
14867 default:
14868 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14869 TREE_TYPE (t));
14870 }
14871 /* We don't know sign of `t', so be conservative and return false. */
14872 return false;
14873 }
14874
14875 /* Return true if T is known to be non-negative. If the return
14876 value is based on the assumption that signed overflow is undefined,
14877 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14878 *STRICT_OVERFLOW_P. */
14879
14880 bool
14881 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14882 tree arg0, tree arg1, bool *strict_overflow_p)
14883 {
14884 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14885 switch (DECL_FUNCTION_CODE (fndecl))
14886 {
14887 CASE_FLT_FN (BUILT_IN_ACOS):
14888 CASE_FLT_FN (BUILT_IN_ACOSH):
14889 CASE_FLT_FN (BUILT_IN_CABS):
14890 CASE_FLT_FN (BUILT_IN_COSH):
14891 CASE_FLT_FN (BUILT_IN_ERFC):
14892 CASE_FLT_FN (BUILT_IN_EXP):
14893 CASE_FLT_FN (BUILT_IN_EXP10):
14894 CASE_FLT_FN (BUILT_IN_EXP2):
14895 CASE_FLT_FN (BUILT_IN_FABS):
14896 CASE_FLT_FN (BUILT_IN_FDIM):
14897 CASE_FLT_FN (BUILT_IN_HYPOT):
14898 CASE_FLT_FN (BUILT_IN_POW10):
14899 CASE_INT_FN (BUILT_IN_FFS):
14900 CASE_INT_FN (BUILT_IN_PARITY):
14901 CASE_INT_FN (BUILT_IN_POPCOUNT):
14902 CASE_INT_FN (BUILT_IN_CLZ):
14903 CASE_INT_FN (BUILT_IN_CLRSB):
14904 case BUILT_IN_BSWAP32:
14905 case BUILT_IN_BSWAP64:
14906 /* Always true. */
14907 return true;
14908
14909 CASE_FLT_FN (BUILT_IN_SQRT):
14910 /* sqrt(-0.0) is -0.0. */
14911 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14912 return true;
14913 return tree_expr_nonnegative_warnv_p (arg0,
14914 strict_overflow_p);
14915
14916 CASE_FLT_FN (BUILT_IN_ASINH):
14917 CASE_FLT_FN (BUILT_IN_ATAN):
14918 CASE_FLT_FN (BUILT_IN_ATANH):
14919 CASE_FLT_FN (BUILT_IN_CBRT):
14920 CASE_FLT_FN (BUILT_IN_CEIL):
14921 CASE_FLT_FN (BUILT_IN_ERF):
14922 CASE_FLT_FN (BUILT_IN_EXPM1):
14923 CASE_FLT_FN (BUILT_IN_FLOOR):
14924 CASE_FLT_FN (BUILT_IN_FMOD):
14925 CASE_FLT_FN (BUILT_IN_FREXP):
14926 CASE_FLT_FN (BUILT_IN_ICEIL):
14927 CASE_FLT_FN (BUILT_IN_IFLOOR):
14928 CASE_FLT_FN (BUILT_IN_IRINT):
14929 CASE_FLT_FN (BUILT_IN_IROUND):
14930 CASE_FLT_FN (BUILT_IN_LCEIL):
14931 CASE_FLT_FN (BUILT_IN_LDEXP):
14932 CASE_FLT_FN (BUILT_IN_LFLOOR):
14933 CASE_FLT_FN (BUILT_IN_LLCEIL):
14934 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14935 CASE_FLT_FN (BUILT_IN_LLRINT):
14936 CASE_FLT_FN (BUILT_IN_LLROUND):
14937 CASE_FLT_FN (BUILT_IN_LRINT):
14938 CASE_FLT_FN (BUILT_IN_LROUND):
14939 CASE_FLT_FN (BUILT_IN_MODF):
14940 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14941 CASE_FLT_FN (BUILT_IN_RINT):
14942 CASE_FLT_FN (BUILT_IN_ROUND):
14943 CASE_FLT_FN (BUILT_IN_SCALB):
14944 CASE_FLT_FN (BUILT_IN_SCALBLN):
14945 CASE_FLT_FN (BUILT_IN_SCALBN):
14946 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14947 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14948 CASE_FLT_FN (BUILT_IN_SINH):
14949 CASE_FLT_FN (BUILT_IN_TANH):
14950 CASE_FLT_FN (BUILT_IN_TRUNC):
14951 /* True if the 1st argument is nonnegative. */
14952 return tree_expr_nonnegative_warnv_p (arg0,
14953 strict_overflow_p);
14954
14955 CASE_FLT_FN (BUILT_IN_FMAX):
14956 /* True if the 1st OR 2nd arguments are nonnegative. */
14957 return (tree_expr_nonnegative_warnv_p (arg0,
14958 strict_overflow_p)
14959 || (tree_expr_nonnegative_warnv_p (arg1,
14960 strict_overflow_p)));
14961
14962 CASE_FLT_FN (BUILT_IN_FMIN):
14963 /* True if the 1st AND 2nd arguments are nonnegative. */
14964 return (tree_expr_nonnegative_warnv_p (arg0,
14965 strict_overflow_p)
14966 && (tree_expr_nonnegative_warnv_p (arg1,
14967 strict_overflow_p)));
14968
14969 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14970 /* True if the 2nd argument is nonnegative. */
14971 return tree_expr_nonnegative_warnv_p (arg1,
14972 strict_overflow_p);
14973
14974 CASE_FLT_FN (BUILT_IN_POWI):
14975 /* True if the 1st argument is nonnegative or the second
14976 argument is an even integer. */
14977 if (TREE_CODE (arg1) == INTEGER_CST
14978 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14979 return true;
14980 return tree_expr_nonnegative_warnv_p (arg0,
14981 strict_overflow_p);
14982
14983 CASE_FLT_FN (BUILT_IN_POW):
14984 /* True if the 1st argument is nonnegative or the second
14985 argument is an even integer valued real. */
14986 if (TREE_CODE (arg1) == REAL_CST)
14987 {
14988 REAL_VALUE_TYPE c;
14989 HOST_WIDE_INT n;
14990
14991 c = TREE_REAL_CST (arg1);
14992 n = real_to_integer (&c);
14993 if ((n & 1) == 0)
14994 {
14995 REAL_VALUE_TYPE cint;
14996 real_from_integer (&cint, VOIDmode, n, SIGNED);
14997 if (real_identical (&c, &cint))
14998 return true;
14999 }
15000 }
15001 return tree_expr_nonnegative_warnv_p (arg0,
15002 strict_overflow_p);
15003
15004 default:
15005 break;
15006 }
15007 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15008 type);
15009 }
15010
15011 /* Return true if T is known to be non-negative. If the return
15012 value is based on the assumption that signed overflow is undefined,
15013 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15014 *STRICT_OVERFLOW_P. */
15015
15016 static bool
15017 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15018 {
15019 enum tree_code code = TREE_CODE (t);
15020 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15021 return true;
15022
15023 switch (code)
15024 {
15025 case TARGET_EXPR:
15026 {
15027 tree temp = TARGET_EXPR_SLOT (t);
15028 t = TARGET_EXPR_INITIAL (t);
15029
15030 /* If the initializer is non-void, then it's a normal expression
15031 that will be assigned to the slot. */
15032 if (!VOID_TYPE_P (t))
15033 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15034
15035 /* Otherwise, the initializer sets the slot in some way. One common
15036 way is an assignment statement at the end of the initializer. */
15037 while (1)
15038 {
15039 if (TREE_CODE (t) == BIND_EXPR)
15040 t = expr_last (BIND_EXPR_BODY (t));
15041 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15042 || TREE_CODE (t) == TRY_CATCH_EXPR)
15043 t = expr_last (TREE_OPERAND (t, 0));
15044 else if (TREE_CODE (t) == STATEMENT_LIST)
15045 t = expr_last (t);
15046 else
15047 break;
15048 }
15049 if (TREE_CODE (t) == MODIFY_EXPR
15050 && TREE_OPERAND (t, 0) == temp)
15051 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15052 strict_overflow_p);
15053
15054 return false;
15055 }
15056
15057 case CALL_EXPR:
15058 {
15059 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15060 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15061
15062 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15063 get_callee_fndecl (t),
15064 arg0,
15065 arg1,
15066 strict_overflow_p);
15067 }
15068 case COMPOUND_EXPR:
15069 case MODIFY_EXPR:
15070 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15071 strict_overflow_p);
15072 case BIND_EXPR:
15073 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15074 strict_overflow_p);
15075 case SAVE_EXPR:
15076 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15077 strict_overflow_p);
15078
15079 default:
15080 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15081 TREE_TYPE (t));
15082 }
15083
15084 /* We don't know sign of `t', so be conservative and return false. */
15085 return false;
15086 }
15087
15088 /* Return true if T is known to be non-negative. If the return
15089 value is based on the assumption that signed overflow is undefined,
15090 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15091 *STRICT_OVERFLOW_P. */
15092
15093 bool
15094 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15095 {
15096 enum tree_code code;
15097 if (t == error_mark_node)
15098 return false;
15099
15100 code = TREE_CODE (t);
15101 switch (TREE_CODE_CLASS (code))
15102 {
15103 case tcc_binary:
15104 case tcc_comparison:
15105 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15106 TREE_TYPE (t),
15107 TREE_OPERAND (t, 0),
15108 TREE_OPERAND (t, 1),
15109 strict_overflow_p);
15110
15111 case tcc_unary:
15112 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15113 TREE_TYPE (t),
15114 TREE_OPERAND (t, 0),
15115 strict_overflow_p);
15116
15117 case tcc_constant:
15118 case tcc_declaration:
15119 case tcc_reference:
15120 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15121
15122 default:
15123 break;
15124 }
15125
15126 switch (code)
15127 {
15128 case TRUTH_AND_EXPR:
15129 case TRUTH_OR_EXPR:
15130 case TRUTH_XOR_EXPR:
15131 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15132 TREE_TYPE (t),
15133 TREE_OPERAND (t, 0),
15134 TREE_OPERAND (t, 1),
15135 strict_overflow_p);
15136 case TRUTH_NOT_EXPR:
15137 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15138 TREE_TYPE (t),
15139 TREE_OPERAND (t, 0),
15140 strict_overflow_p);
15141
15142 case COND_EXPR:
15143 case CONSTRUCTOR:
15144 case OBJ_TYPE_REF:
15145 case ASSERT_EXPR:
15146 case ADDR_EXPR:
15147 case WITH_SIZE_EXPR:
15148 case SSA_NAME:
15149 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15150
15151 default:
15152 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15153 }
15154 }
15155
15156 /* Return true if `t' is known to be non-negative. Handle warnings
15157 about undefined signed overflow. */
15158
15159 bool
15160 tree_expr_nonnegative_p (tree t)
15161 {
15162 bool ret, strict_overflow_p;
15163
15164 strict_overflow_p = false;
15165 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15166 if (strict_overflow_p)
15167 fold_overflow_warning (("assuming signed overflow does not occur when "
15168 "determining that expression is always "
15169 "non-negative"),
15170 WARN_STRICT_OVERFLOW_MISC);
15171 return ret;
15172 }
15173
15174
15175 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15176 For floating point we further ensure that T is not denormal.
15177 Similar logic is present in nonzero_address in rtlanal.h.
15178
15179 If the return value is based on the assumption that signed overflow
15180 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15181 change *STRICT_OVERFLOW_P. */
15182
15183 bool
15184 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15185 bool *strict_overflow_p)
15186 {
15187 switch (code)
15188 {
15189 case ABS_EXPR:
15190 return tree_expr_nonzero_warnv_p (op0,
15191 strict_overflow_p);
15192
15193 case NOP_EXPR:
15194 {
15195 tree inner_type = TREE_TYPE (op0);
15196 tree outer_type = type;
15197
15198 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15199 && tree_expr_nonzero_warnv_p (op0,
15200 strict_overflow_p));
15201 }
15202 break;
15203
15204 case NON_LVALUE_EXPR:
15205 return tree_expr_nonzero_warnv_p (op0,
15206 strict_overflow_p);
15207
15208 default:
15209 break;
15210 }
15211
15212 return false;
15213 }
15214
15215 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15216 For floating point we further ensure that T is not denormal.
15217 Similar logic is present in nonzero_address in rtlanal.h.
15218
15219 If the return value is based on the assumption that signed overflow
15220 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15221 change *STRICT_OVERFLOW_P. */
15222
15223 bool
15224 tree_binary_nonzero_warnv_p (enum tree_code code,
15225 tree type,
15226 tree op0,
15227 tree op1, bool *strict_overflow_p)
15228 {
15229 bool sub_strict_overflow_p;
15230 switch (code)
15231 {
15232 case POINTER_PLUS_EXPR:
15233 case PLUS_EXPR:
15234 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15235 {
15236 /* With the presence of negative values it is hard
15237 to say something. */
15238 sub_strict_overflow_p = false;
15239 if (!tree_expr_nonnegative_warnv_p (op0,
15240 &sub_strict_overflow_p)
15241 || !tree_expr_nonnegative_warnv_p (op1,
15242 &sub_strict_overflow_p))
15243 return false;
15244 /* One of operands must be positive and the other non-negative. */
15245 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15246 overflows, on a twos-complement machine the sum of two
15247 nonnegative numbers can never be zero. */
15248 return (tree_expr_nonzero_warnv_p (op0,
15249 strict_overflow_p)
15250 || tree_expr_nonzero_warnv_p (op1,
15251 strict_overflow_p));
15252 }
15253 break;
15254
15255 case MULT_EXPR:
15256 if (TYPE_OVERFLOW_UNDEFINED (type))
15257 {
15258 if (tree_expr_nonzero_warnv_p (op0,
15259 strict_overflow_p)
15260 && tree_expr_nonzero_warnv_p (op1,
15261 strict_overflow_p))
15262 {
15263 *strict_overflow_p = true;
15264 return true;
15265 }
15266 }
15267 break;
15268
15269 case MIN_EXPR:
15270 sub_strict_overflow_p = false;
15271 if (tree_expr_nonzero_warnv_p (op0,
15272 &sub_strict_overflow_p)
15273 && tree_expr_nonzero_warnv_p (op1,
15274 &sub_strict_overflow_p))
15275 {
15276 if (sub_strict_overflow_p)
15277 *strict_overflow_p = true;
15278 }
15279 break;
15280
15281 case MAX_EXPR:
15282 sub_strict_overflow_p = false;
15283 if (tree_expr_nonzero_warnv_p (op0,
15284 &sub_strict_overflow_p))
15285 {
15286 if (sub_strict_overflow_p)
15287 *strict_overflow_p = true;
15288
15289 /* When both operands are nonzero, then MAX must be too. */
15290 if (tree_expr_nonzero_warnv_p (op1,
15291 strict_overflow_p))
15292 return true;
15293
15294 /* MAX where operand 0 is positive is positive. */
15295 return tree_expr_nonnegative_warnv_p (op0,
15296 strict_overflow_p);
15297 }
15298 /* MAX where operand 1 is positive is positive. */
15299 else if (tree_expr_nonzero_warnv_p (op1,
15300 &sub_strict_overflow_p)
15301 && tree_expr_nonnegative_warnv_p (op1,
15302 &sub_strict_overflow_p))
15303 {
15304 if (sub_strict_overflow_p)
15305 *strict_overflow_p = true;
15306 return true;
15307 }
15308 break;
15309
15310 case BIT_IOR_EXPR:
15311 return (tree_expr_nonzero_warnv_p (op1,
15312 strict_overflow_p)
15313 || tree_expr_nonzero_warnv_p (op0,
15314 strict_overflow_p));
15315
15316 default:
15317 break;
15318 }
15319
15320 return false;
15321 }
15322
15323 /* Return true when T is an address and is known to be nonzero.
15324 For floating point we further ensure that T is not denormal.
15325 Similar logic is present in nonzero_address in rtlanal.h.
15326
15327 If the return value is based on the assumption that signed overflow
15328 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15329 change *STRICT_OVERFLOW_P. */
15330
15331 bool
15332 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15333 {
15334 bool sub_strict_overflow_p;
15335 switch (TREE_CODE (t))
15336 {
15337 case INTEGER_CST:
15338 return !integer_zerop (t);
15339
15340 case ADDR_EXPR:
15341 {
15342 tree base = TREE_OPERAND (t, 0);
15343
15344 if (!DECL_P (base))
15345 base = get_base_address (base);
15346
15347 if (!base)
15348 return false;
15349
15350 /* For objects in symbol table check if we know they are non-zero.
15351 Don't do anything for variables and functions before symtab is built;
15352 it is quite possible that they will be declared weak later. */
15353 if (DECL_P (base) && decl_in_symtab_p (base))
15354 {
15355 struct symtab_node *symbol;
15356
15357 symbol = symtab_node::get_create (base);
15358 if (symbol)
15359 return symbol->nonzero_address ();
15360 else
15361 return false;
15362 }
15363
15364 /* Function local objects are never NULL. */
15365 if (DECL_P (base)
15366 && (DECL_CONTEXT (base)
15367 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15368 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15369 return true;
15370
15371 /* Constants are never weak. */
15372 if (CONSTANT_CLASS_P (base))
15373 return true;
15374
15375 return false;
15376 }
15377
15378 case COND_EXPR:
15379 sub_strict_overflow_p = false;
15380 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15381 &sub_strict_overflow_p)
15382 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15383 &sub_strict_overflow_p))
15384 {
15385 if (sub_strict_overflow_p)
15386 *strict_overflow_p = true;
15387 return true;
15388 }
15389 break;
15390
15391 default:
15392 break;
15393 }
15394 return false;
15395 }
15396
15397 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15398 attempt to fold the expression to a constant without modifying TYPE,
15399 OP0 or OP1.
15400
15401 If the expression could be simplified to a constant, then return
15402 the constant. If the expression would not be simplified to a
15403 constant, then return NULL_TREE. */
15404
15405 tree
15406 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15407 {
15408 tree tem = fold_binary (code, type, op0, op1);
15409 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15410 }
15411
15412 /* Given the components of a unary expression CODE, TYPE and OP0,
15413 attempt to fold the expression to a constant without modifying
15414 TYPE or OP0.
15415
15416 If the expression could be simplified to a constant, then return
15417 the constant. If the expression would not be simplified to a
15418 constant, then return NULL_TREE. */
15419
15420 tree
15421 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15422 {
15423 tree tem = fold_unary (code, type, op0);
15424 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15425 }
15426
15427 /* If EXP represents referencing an element in a constant string
15428 (either via pointer arithmetic or array indexing), return the
15429 tree representing the value accessed, otherwise return NULL. */
15430
15431 tree
15432 fold_read_from_constant_string (tree exp)
15433 {
15434 if ((TREE_CODE (exp) == INDIRECT_REF
15435 || TREE_CODE (exp) == ARRAY_REF)
15436 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15437 {
15438 tree exp1 = TREE_OPERAND (exp, 0);
15439 tree index;
15440 tree string;
15441 location_t loc = EXPR_LOCATION (exp);
15442
15443 if (TREE_CODE (exp) == INDIRECT_REF)
15444 string = string_constant (exp1, &index);
15445 else
15446 {
15447 tree low_bound = array_ref_low_bound (exp);
15448 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15449
15450 /* Optimize the special-case of a zero lower bound.
15451
15452 We convert the low_bound to sizetype to avoid some problems
15453 with constant folding. (E.g. suppose the lower bound is 1,
15454 and its mode is QI. Without the conversion,l (ARRAY
15455 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15456 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15457 if (! integer_zerop (low_bound))
15458 index = size_diffop_loc (loc, index,
15459 fold_convert_loc (loc, sizetype, low_bound));
15460
15461 string = exp1;
15462 }
15463
15464 if (string
15465 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15466 && TREE_CODE (string) == STRING_CST
15467 && TREE_CODE (index) == INTEGER_CST
15468 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15469 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15470 == MODE_INT)
15471 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15472 return build_int_cst_type (TREE_TYPE (exp),
15473 (TREE_STRING_POINTER (string)
15474 [TREE_INT_CST_LOW (index)]));
15475 }
15476 return NULL;
15477 }
15478
15479 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15480 an integer constant, real, or fixed-point constant.
15481
15482 TYPE is the type of the result. */
15483
15484 static tree
15485 fold_negate_const (tree arg0, tree type)
15486 {
15487 tree t = NULL_TREE;
15488
15489 switch (TREE_CODE (arg0))
15490 {
15491 case INTEGER_CST:
15492 {
15493 bool overflow;
15494 wide_int val = wi::neg (arg0, &overflow);
15495 t = force_fit_type (type, val, 1,
15496 (overflow | TREE_OVERFLOW (arg0))
15497 && !TYPE_UNSIGNED (type));
15498 break;
15499 }
15500
15501 case REAL_CST:
15502 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15503 break;
15504
15505 case FIXED_CST:
15506 {
15507 FIXED_VALUE_TYPE f;
15508 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15509 &(TREE_FIXED_CST (arg0)), NULL,
15510 TYPE_SATURATING (type));
15511 t = build_fixed (type, f);
15512 /* Propagate overflow flags. */
15513 if (overflow_p | TREE_OVERFLOW (arg0))
15514 TREE_OVERFLOW (t) = 1;
15515 break;
15516 }
15517
15518 default:
15519 gcc_unreachable ();
15520 }
15521
15522 return t;
15523 }
15524
15525 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15526 an integer constant or real constant.
15527
15528 TYPE is the type of the result. */
15529
15530 tree
15531 fold_abs_const (tree arg0, tree type)
15532 {
15533 tree t = NULL_TREE;
15534
15535 switch (TREE_CODE (arg0))
15536 {
15537 case INTEGER_CST:
15538 {
15539 /* If the value is unsigned or non-negative, then the absolute value
15540 is the same as the ordinary value. */
15541 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15542 t = arg0;
15543
15544 /* If the value is negative, then the absolute value is
15545 its negation. */
15546 else
15547 {
15548 bool overflow;
15549 wide_int val = wi::neg (arg0, &overflow);
15550 t = force_fit_type (type, val, -1,
15551 overflow | TREE_OVERFLOW (arg0));
15552 }
15553 }
15554 break;
15555
15556 case REAL_CST:
15557 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15558 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15559 else
15560 t = arg0;
15561 break;
15562
15563 default:
15564 gcc_unreachable ();
15565 }
15566
15567 return t;
15568 }
15569
15570 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15571 constant. TYPE is the type of the result. */
15572
15573 static tree
15574 fold_not_const (const_tree arg0, tree type)
15575 {
15576 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15577
15578 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15579 }
15580
15581 /* Given CODE, a relational operator, the target type, TYPE and two
15582 constant operands OP0 and OP1, return the result of the
15583 relational operation. If the result is not a compile time
15584 constant, then return NULL_TREE. */
15585
15586 static tree
15587 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15588 {
15589 int result, invert;
15590
15591 /* From here on, the only cases we handle are when the result is
15592 known to be a constant. */
15593
15594 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15595 {
15596 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15597 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15598
15599 /* Handle the cases where either operand is a NaN. */
15600 if (real_isnan (c0) || real_isnan (c1))
15601 {
15602 switch (code)
15603 {
15604 case EQ_EXPR:
15605 case ORDERED_EXPR:
15606 result = 0;
15607 break;
15608
15609 case NE_EXPR:
15610 case UNORDERED_EXPR:
15611 case UNLT_EXPR:
15612 case UNLE_EXPR:
15613 case UNGT_EXPR:
15614 case UNGE_EXPR:
15615 case UNEQ_EXPR:
15616 result = 1;
15617 break;
15618
15619 case LT_EXPR:
15620 case LE_EXPR:
15621 case GT_EXPR:
15622 case GE_EXPR:
15623 case LTGT_EXPR:
15624 if (flag_trapping_math)
15625 return NULL_TREE;
15626 result = 0;
15627 break;
15628
15629 default:
15630 gcc_unreachable ();
15631 }
15632
15633 return constant_boolean_node (result, type);
15634 }
15635
15636 return constant_boolean_node (real_compare (code, c0, c1), type);
15637 }
15638
15639 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15640 {
15641 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15642 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15643 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15644 }
15645
15646 /* Handle equality/inequality of complex constants. */
15647 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15648 {
15649 tree rcond = fold_relational_const (code, type,
15650 TREE_REALPART (op0),
15651 TREE_REALPART (op1));
15652 tree icond = fold_relational_const (code, type,
15653 TREE_IMAGPART (op0),
15654 TREE_IMAGPART (op1));
15655 if (code == EQ_EXPR)
15656 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15657 else if (code == NE_EXPR)
15658 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15659 else
15660 return NULL_TREE;
15661 }
15662
15663 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15664 {
15665 unsigned count = VECTOR_CST_NELTS (op0);
15666 tree *elts = XALLOCAVEC (tree, count);
15667 gcc_assert (VECTOR_CST_NELTS (op1) == count
15668 && TYPE_VECTOR_SUBPARTS (type) == count);
15669
15670 for (unsigned i = 0; i < count; i++)
15671 {
15672 tree elem_type = TREE_TYPE (type);
15673 tree elem0 = VECTOR_CST_ELT (op0, i);
15674 tree elem1 = VECTOR_CST_ELT (op1, i);
15675
15676 tree tem = fold_relational_const (code, elem_type,
15677 elem0, elem1);
15678
15679 if (tem == NULL_TREE)
15680 return NULL_TREE;
15681
15682 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15683 }
15684
15685 return build_vector (type, elts);
15686 }
15687
15688 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15689
15690 To compute GT, swap the arguments and do LT.
15691 To compute GE, do LT and invert the result.
15692 To compute LE, swap the arguments, do LT and invert the result.
15693 To compute NE, do EQ and invert the result.
15694
15695 Therefore, the code below must handle only EQ and LT. */
15696
15697 if (code == LE_EXPR || code == GT_EXPR)
15698 {
15699 tree tem = op0;
15700 op0 = op1;
15701 op1 = tem;
15702 code = swap_tree_comparison (code);
15703 }
15704
15705 /* Note that it is safe to invert for real values here because we
15706 have already handled the one case that it matters. */
15707
15708 invert = 0;
15709 if (code == NE_EXPR || code == GE_EXPR)
15710 {
15711 invert = 1;
15712 code = invert_tree_comparison (code, false);
15713 }
15714
15715 /* Compute a result for LT or EQ if args permit;
15716 Otherwise return T. */
15717 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15718 {
15719 if (code == EQ_EXPR)
15720 result = tree_int_cst_equal (op0, op1);
15721 else
15722 result = tree_int_cst_lt (op0, op1);
15723 }
15724 else
15725 return NULL_TREE;
15726
15727 if (invert)
15728 result ^= 1;
15729 return constant_boolean_node (result, type);
15730 }
15731
15732 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15733 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15734 itself. */
15735
15736 tree
15737 fold_build_cleanup_point_expr (tree type, tree expr)
15738 {
15739 /* If the expression does not have side effects then we don't have to wrap
15740 it with a cleanup point expression. */
15741 if (!TREE_SIDE_EFFECTS (expr))
15742 return expr;
15743
15744 /* If the expression is a return, check to see if the expression inside the
15745 return has no side effects or the right hand side of the modify expression
15746 inside the return. If either don't have side effects set we don't need to
15747 wrap the expression in a cleanup point expression. Note we don't check the
15748 left hand side of the modify because it should always be a return decl. */
15749 if (TREE_CODE (expr) == RETURN_EXPR)
15750 {
15751 tree op = TREE_OPERAND (expr, 0);
15752 if (!op || !TREE_SIDE_EFFECTS (op))
15753 return expr;
15754 op = TREE_OPERAND (op, 1);
15755 if (!TREE_SIDE_EFFECTS (op))
15756 return expr;
15757 }
15758
15759 return build1 (CLEANUP_POINT_EXPR, type, expr);
15760 }
15761
15762 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15763 of an indirection through OP0, or NULL_TREE if no simplification is
15764 possible. */
15765
15766 tree
15767 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15768 {
15769 tree sub = op0;
15770 tree subtype;
15771
15772 STRIP_NOPS (sub);
15773 subtype = TREE_TYPE (sub);
15774 if (!POINTER_TYPE_P (subtype))
15775 return NULL_TREE;
15776
15777 if (TREE_CODE (sub) == ADDR_EXPR)
15778 {
15779 tree op = TREE_OPERAND (sub, 0);
15780 tree optype = TREE_TYPE (op);
15781 /* *&CONST_DECL -> to the value of the const decl. */
15782 if (TREE_CODE (op) == CONST_DECL)
15783 return DECL_INITIAL (op);
15784 /* *&p => p; make sure to handle *&"str"[cst] here. */
15785 if (type == optype)
15786 {
15787 tree fop = fold_read_from_constant_string (op);
15788 if (fop)
15789 return fop;
15790 else
15791 return op;
15792 }
15793 /* *(foo *)&fooarray => fooarray[0] */
15794 else if (TREE_CODE (optype) == ARRAY_TYPE
15795 && type == TREE_TYPE (optype)
15796 && (!in_gimple_form
15797 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15798 {
15799 tree type_domain = TYPE_DOMAIN (optype);
15800 tree min_val = size_zero_node;
15801 if (type_domain && TYPE_MIN_VALUE (type_domain))
15802 min_val = TYPE_MIN_VALUE (type_domain);
15803 if (in_gimple_form
15804 && TREE_CODE (min_val) != INTEGER_CST)
15805 return NULL_TREE;
15806 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15807 NULL_TREE, NULL_TREE);
15808 }
15809 /* *(foo *)&complexfoo => __real__ complexfoo */
15810 else if (TREE_CODE (optype) == COMPLEX_TYPE
15811 && type == TREE_TYPE (optype))
15812 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15813 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15814 else if (TREE_CODE (optype) == VECTOR_TYPE
15815 && type == TREE_TYPE (optype))
15816 {
15817 tree part_width = TYPE_SIZE (type);
15818 tree index = bitsize_int (0);
15819 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15820 }
15821 }
15822
15823 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15824 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15825 {
15826 tree op00 = TREE_OPERAND (sub, 0);
15827 tree op01 = TREE_OPERAND (sub, 1);
15828
15829 STRIP_NOPS (op00);
15830 if (TREE_CODE (op00) == ADDR_EXPR)
15831 {
15832 tree op00type;
15833 op00 = TREE_OPERAND (op00, 0);
15834 op00type = TREE_TYPE (op00);
15835
15836 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15837 if (TREE_CODE (op00type) == VECTOR_TYPE
15838 && type == TREE_TYPE (op00type))
15839 {
15840 HOST_WIDE_INT offset = tree_to_shwi (op01);
15841 tree part_width = TYPE_SIZE (type);
15842 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
15843 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15844 tree index = bitsize_int (indexi);
15845
15846 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15847 return fold_build3_loc (loc,
15848 BIT_FIELD_REF, type, op00,
15849 part_width, index);
15850
15851 }
15852 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15853 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15854 && type == TREE_TYPE (op00type))
15855 {
15856 tree size = TYPE_SIZE_UNIT (type);
15857 if (tree_int_cst_equal (size, op01))
15858 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15859 }
15860 /* ((foo *)&fooarray)[1] => fooarray[1] */
15861 else if (TREE_CODE (op00type) == ARRAY_TYPE
15862 && type == TREE_TYPE (op00type))
15863 {
15864 tree type_domain = TYPE_DOMAIN (op00type);
15865 tree min_val = size_zero_node;
15866 if (type_domain && TYPE_MIN_VALUE (type_domain))
15867 min_val = TYPE_MIN_VALUE (type_domain);
15868 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15869 TYPE_SIZE_UNIT (type));
15870 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15871 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15872 NULL_TREE, NULL_TREE);
15873 }
15874 }
15875 }
15876
15877 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15878 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15879 && type == TREE_TYPE (TREE_TYPE (subtype))
15880 && (!in_gimple_form
15881 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15882 {
15883 tree type_domain;
15884 tree min_val = size_zero_node;
15885 sub = build_fold_indirect_ref_loc (loc, sub);
15886 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15887 if (type_domain && TYPE_MIN_VALUE (type_domain))
15888 min_val = TYPE_MIN_VALUE (type_domain);
15889 if (in_gimple_form
15890 && TREE_CODE (min_val) != INTEGER_CST)
15891 return NULL_TREE;
15892 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15893 NULL_TREE);
15894 }
15895
15896 return NULL_TREE;
15897 }
15898
15899 /* Builds an expression for an indirection through T, simplifying some
15900 cases. */
15901
15902 tree
15903 build_fold_indirect_ref_loc (location_t loc, tree t)
15904 {
15905 tree type = TREE_TYPE (TREE_TYPE (t));
15906 tree sub = fold_indirect_ref_1 (loc, type, t);
15907
15908 if (sub)
15909 return sub;
15910
15911 return build1_loc (loc, INDIRECT_REF, type, t);
15912 }
15913
15914 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15915
15916 tree
15917 fold_indirect_ref_loc (location_t loc, tree t)
15918 {
15919 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15920
15921 if (sub)
15922 return sub;
15923 else
15924 return t;
15925 }
15926
15927 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15928 whose result is ignored. The type of the returned tree need not be
15929 the same as the original expression. */
15930
15931 tree
15932 fold_ignored_result (tree t)
15933 {
15934 if (!TREE_SIDE_EFFECTS (t))
15935 return integer_zero_node;
15936
15937 for (;;)
15938 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15939 {
15940 case tcc_unary:
15941 t = TREE_OPERAND (t, 0);
15942 break;
15943
15944 case tcc_binary:
15945 case tcc_comparison:
15946 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15947 t = TREE_OPERAND (t, 0);
15948 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15949 t = TREE_OPERAND (t, 1);
15950 else
15951 return t;
15952 break;
15953
15954 case tcc_expression:
15955 switch (TREE_CODE (t))
15956 {
15957 case COMPOUND_EXPR:
15958 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15959 return t;
15960 t = TREE_OPERAND (t, 0);
15961 break;
15962
15963 case COND_EXPR:
15964 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15965 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15966 return t;
15967 t = TREE_OPERAND (t, 0);
15968 break;
15969
15970 default:
15971 return t;
15972 }
15973 break;
15974
15975 default:
15976 return t;
15977 }
15978 }
15979
15980 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15981
15982 tree
15983 round_up_loc (location_t loc, tree value, unsigned int divisor)
15984 {
15985 tree div = NULL_TREE;
15986
15987 if (divisor == 1)
15988 return value;
15989
15990 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15991 have to do anything. Only do this when we are not given a const,
15992 because in that case, this check is more expensive than just
15993 doing it. */
15994 if (TREE_CODE (value) != INTEGER_CST)
15995 {
15996 div = build_int_cst (TREE_TYPE (value), divisor);
15997
15998 if (multiple_of_p (TREE_TYPE (value), value, div))
15999 return value;
16000 }
16001
16002 /* If divisor is a power of two, simplify this to bit manipulation. */
16003 if (divisor == (divisor & -divisor))
16004 {
16005 if (TREE_CODE (value) == INTEGER_CST)
16006 {
16007 wide_int val = value;
16008 bool overflow_p;
16009
16010 if ((val & (divisor - 1)) == 0)
16011 return value;
16012
16013 overflow_p = TREE_OVERFLOW (value);
16014 val &= ~(divisor - 1);
16015 val += divisor;
16016 if (val == 0)
16017 overflow_p = true;
16018
16019 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16020 }
16021 else
16022 {
16023 tree t;
16024
16025 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16026 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16027 t = build_int_cst (TREE_TYPE (value), -divisor);
16028 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16029 }
16030 }
16031 else
16032 {
16033 if (!div)
16034 div = build_int_cst (TREE_TYPE (value), divisor);
16035 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16036 value = size_binop_loc (loc, MULT_EXPR, value, div);
16037 }
16038
16039 return value;
16040 }
16041
16042 /* Likewise, but round down. */
16043
16044 tree
16045 round_down_loc (location_t loc, tree value, int divisor)
16046 {
16047 tree div = NULL_TREE;
16048
16049 gcc_assert (divisor > 0);
16050 if (divisor == 1)
16051 return value;
16052
16053 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16054 have to do anything. Only do this when we are not given a const,
16055 because in that case, this check is more expensive than just
16056 doing it. */
16057 if (TREE_CODE (value) != INTEGER_CST)
16058 {
16059 div = build_int_cst (TREE_TYPE (value), divisor);
16060
16061 if (multiple_of_p (TREE_TYPE (value), value, div))
16062 return value;
16063 }
16064
16065 /* If divisor is a power of two, simplify this to bit manipulation. */
16066 if (divisor == (divisor & -divisor))
16067 {
16068 tree t;
16069
16070 t = build_int_cst (TREE_TYPE (value), -divisor);
16071 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16072 }
16073 else
16074 {
16075 if (!div)
16076 div = build_int_cst (TREE_TYPE (value), divisor);
16077 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16078 value = size_binop_loc (loc, MULT_EXPR, value, div);
16079 }
16080
16081 return value;
16082 }
16083
16084 /* Returns the pointer to the base of the object addressed by EXP and
16085 extracts the information about the offset of the access, storing it
16086 to PBITPOS and POFFSET. */
16087
16088 static tree
16089 split_address_to_core_and_offset (tree exp,
16090 HOST_WIDE_INT *pbitpos, tree *poffset)
16091 {
16092 tree core;
16093 machine_mode mode;
16094 int unsignedp, volatilep;
16095 HOST_WIDE_INT bitsize;
16096 location_t loc = EXPR_LOCATION (exp);
16097
16098 if (TREE_CODE (exp) == ADDR_EXPR)
16099 {
16100 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16101 poffset, &mode, &unsignedp, &volatilep,
16102 false);
16103 core = build_fold_addr_expr_loc (loc, core);
16104 }
16105 else
16106 {
16107 core = exp;
16108 *pbitpos = 0;
16109 *poffset = NULL_TREE;
16110 }
16111
16112 return core;
16113 }
16114
16115 /* Returns true if addresses of E1 and E2 differ by a constant, false
16116 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16117
16118 bool
16119 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16120 {
16121 tree core1, core2;
16122 HOST_WIDE_INT bitpos1, bitpos2;
16123 tree toffset1, toffset2, tdiff, type;
16124
16125 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16126 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16127
16128 if (bitpos1 % BITS_PER_UNIT != 0
16129 || bitpos2 % BITS_PER_UNIT != 0
16130 || !operand_equal_p (core1, core2, 0))
16131 return false;
16132
16133 if (toffset1 && toffset2)
16134 {
16135 type = TREE_TYPE (toffset1);
16136 if (type != TREE_TYPE (toffset2))
16137 toffset2 = fold_convert (type, toffset2);
16138
16139 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16140 if (!cst_and_fits_in_hwi (tdiff))
16141 return false;
16142
16143 *diff = int_cst_value (tdiff);
16144 }
16145 else if (toffset1 || toffset2)
16146 {
16147 /* If only one of the offsets is non-constant, the difference cannot
16148 be a constant. */
16149 return false;
16150 }
16151 else
16152 *diff = 0;
16153
16154 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16155 return true;
16156 }
16157
16158 /* Simplify the floating point expression EXP when the sign of the
16159 result is not significant. Return NULL_TREE if no simplification
16160 is possible. */
16161
16162 tree
16163 fold_strip_sign_ops (tree exp)
16164 {
16165 tree arg0, arg1;
16166 location_t loc = EXPR_LOCATION (exp);
16167
16168 switch (TREE_CODE (exp))
16169 {
16170 case ABS_EXPR:
16171 case NEGATE_EXPR:
16172 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16173 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16174
16175 case MULT_EXPR:
16176 case RDIV_EXPR:
16177 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
16178 return NULL_TREE;
16179 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16180 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16181 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16182 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16183 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16184 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16185 break;
16186
16187 case COMPOUND_EXPR:
16188 arg0 = TREE_OPERAND (exp, 0);
16189 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16190 if (arg1)
16191 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16192 break;
16193
16194 case COND_EXPR:
16195 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16196 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16197 if (arg0 || arg1)
16198 return fold_build3_loc (loc,
16199 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16200 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16201 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16202 break;
16203
16204 case CALL_EXPR:
16205 {
16206 const enum built_in_function fcode = builtin_mathfn_code (exp);
16207 switch (fcode)
16208 {
16209 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16210 /* Strip copysign function call, return the 1st argument. */
16211 arg0 = CALL_EXPR_ARG (exp, 0);
16212 arg1 = CALL_EXPR_ARG (exp, 1);
16213 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16214
16215 default:
16216 /* Strip sign ops from the argument of "odd" math functions. */
16217 if (negate_mathfn_p (fcode))
16218 {
16219 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16220 if (arg0)
16221 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16222 }
16223 break;
16224 }
16225 }
16226 break;
16227
16228 default:
16229 break;
16230 }
16231 return NULL_TREE;
16232 }
16233
16234 /* Return OFF converted to a pointer offset type suitable as offset for
16235 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16236 tree
16237 convert_to_ptrofftype_loc (location_t loc, tree off)
16238 {
16239 return fold_convert_loc (loc, sizetype, off);
16240 }
16241
16242 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16243 tree
16244 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16245 {
16246 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16247 ptr, convert_to_ptrofftype_loc (loc, off));
16248 }
16249
16250 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16251 tree
16252 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16253 {
16254 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16255 ptr, size_int (off));
16256 }