1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24   @@ warn if precision etc. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "tree.h"
48 #include "gimple.h"
49 #include "rtl.h"
50 #include "flags.h"
51 #include "alias.h"
52 #include "fold-const.h"
53 #include "stor-layout.h"
54 #include "calls.h"
55 #include "tree-iterator.h"
56 #include "realmpfr.h"
57 #include "insn-config.h"
58 #include "expmed.h"
59 #include "dojump.h"
60 #include "explow.h"
61 #include "emit-rtl.h"
62 #include "varasm.h"
63 #include "stmt.h"
64 #include "expr.h"
65 #include "tm_p.h"
66 #include "target.h"
67 #include "diagnostic-core.h"
68 #include "intl.h"
69 #include "langhooks.h"
70 #include "md5.h"
71 #include "internal-fn.h"
72 #include "tree-eh.h"
73 #include "gimplify.h"
74 #include "tree-dfa.h"
75 #include "builtins.h"
76 #include "cgraph.h"
77 #include "generic-match.h"
78 #include "optabs.h"
79
80 /* Nonzero if we are folding constants inside an initializer; zero
81 otherwise. */
82 int folding_initializer = 0;
83
84 /* The following constants represent a bit-based encoding of GCC's
85    comparison operators.  This encoding simplifies transformations
86    on relational comparisons, such as combining them with AND and OR.  */
87 enum comparison_code {
88 COMPCODE_FALSE = 0,
89 COMPCODE_LT = 1,
90 COMPCODE_EQ = 2,
91 COMPCODE_LE = 3,
92 COMPCODE_GT = 4,
93 COMPCODE_LTGT = 5,
94 COMPCODE_GE = 6,
95 COMPCODE_ORD = 7,
96 COMPCODE_UNORD = 8,
97 COMPCODE_UNLT = 9,
98 COMPCODE_UNEQ = 10,
99 COMPCODE_UNLE = 11,
100 COMPCODE_UNGT = 12,
101 COMPCODE_NE = 13,
102 COMPCODE_UNGE = 14,
103 COMPCODE_TRUE = 15
104 };
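
/* Editor's note (illustration, not part of the original source): the low
   three bits stand for LT, EQ and GT, and bit 3 for UNORD, so each code
   is the bitwise OR of the outcomes it accepts:

     COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ                   (1|2 == 3)
     COMPCODE_GE == COMPCODE_EQ | COMPCODE_GT                   (2|4 == 6)
     COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD  (1|4|8 == 13)

   This is why a logical AND or OR of two comparisons of the same operands
   can be computed as a bitwise AND or OR of their codes.  */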
105
106 static bool negate_mathfn_p (enum built_in_function);
107 static bool negate_expr_p (tree);
108 static tree negate_expr (tree);
109 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
110 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
111 static enum comparison_code comparison_to_compcode (enum tree_code);
112 static enum tree_code compcode_to_comparison (enum comparison_code);
113 static int operand_equal_for_comparison_p (tree, tree, tree);
114 static int twoval_comparison_p (tree, tree *, tree *, int *);
115 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
116 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
117 static tree make_bit_field_ref (location_t, tree, tree,
118 HOST_WIDE_INT, HOST_WIDE_INT, int);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 tree, tree, tree);
121 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
122 HOST_WIDE_INT *,
123 machine_mode *, int *, int *,
124 tree *, tree *);
125 static int simple_operand_p (const_tree);
126 static bool simple_operand_p_2 (tree);
127 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
128 static tree range_predecessor (tree);
129 static tree range_successor (tree);
130 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
131 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
132 static tree unextend (tree, int, int, tree);
133 static tree optimize_minmax_comparison (location_t, enum tree_code,
134 tree, tree, tree);
135 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
136 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
137 static tree fold_binary_op_with_conditional_arg (location_t,
138 enum tree_code, tree,
139 tree, tree,
140 tree, tree, int);
141 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (const_tree, const_tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (const_tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static tree fold_convert_const (enum tree_code, tree, tree);
147 static tree fold_view_convert_expr (tree, tree);
148 static bool vec_cst_ctor_to_array (tree, tree *);
149
150
151 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
152 Otherwise, return LOC. */
153
154 static location_t
155 expr_location_or (tree t, location_t loc)
156 {
157 location_t tloc = EXPR_LOCATION (t);
158 return tloc == UNKNOWN_LOCATION ? loc : tloc;
159 }
160
161 /* Similar to protected_set_expr_location, but never modify x in place;
162    if the location can and needs to be set, unshare it.  */
163
164 static inline tree
165 protected_set_expr_location_unshare (tree x, location_t loc)
166 {
167 if (CAN_HAVE_LOCATION_P (x)
168 && EXPR_LOCATION (x) != loc
169 && !(TREE_CODE (x) == SAVE_EXPR
170 || TREE_CODE (x) == TARGET_EXPR
171 || TREE_CODE (x) == BIND_EXPR))
172 {
173 x = copy_node (x);
174 SET_EXPR_LOCATION (x, loc);
175 }
176 return x;
177 }
178 \f
179 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
180 division and returns the quotient. Otherwise returns
181 NULL_TREE. */
182
183 tree
184 div_if_zero_remainder (const_tree arg1, const_tree arg2)
185 {
186 widest_int quo;
187
188 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
189 SIGNED, &quo))
190 return wide_int_to_tree (TREE_TYPE (arg1), quo);
191
192 return NULL_TREE;
193 }
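
/* Editor's sketch of a hypothetical caller (not part of the original
   source):

     tree four = build_int_cst (sizetype, 4);
     tree twelve = build_int_cst (sizetype, 12);
     tree q = div_if_zero_remainder (twelve, four);
     tree r = div_if_zero_remainder (four, twelve);

   Here Q is the INTEGER_CST 3 (12 % 4 == 0), while R is NULL_TREE
   because 4 is not a multiple of 12.  */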
194 \f
195 /* This is nonzero if we should defer warnings about undefined
196 overflow. This facility exists because these warnings are a
197 special case. The code to estimate loop iterations does not want
198 to issue any warnings, since it works with expressions which do not
199 occur in user code. Various bits of cleanup code call fold(), but
200 only use the result if it has certain characteristics (e.g., is a
201 constant); that code only wants to issue a warning if the result is
202 used. */
203
204 static int fold_deferring_overflow_warnings;
205
206 /* If a warning about undefined overflow is deferred, this is the
207 warning. Note that this may cause us to turn two warnings into
208 one, but that is fine since it is sufficient to only give one
209 warning per expression. */
210
211 static const char* fold_deferred_overflow_warning;
212
213 /* If a warning about undefined overflow is deferred, this is the
214 level at which the warning should be emitted. */
215
216 static enum warn_strict_overflow_code fold_deferred_overflow_code;
217
218 /* Start deferring overflow warnings. We could use a stack here to
219 permit nested calls, but at present it is not necessary. */
220
221 void
222 fold_defer_overflow_warnings (void)
223 {
224 ++fold_deferring_overflow_warnings;
225 }
226
227 /* Stop deferring overflow warnings. If there is a pending warning,
228 and ISSUE is true, then issue the warning if appropriate. STMT is
229 the statement with which the warning should be associated (used for
230 location information); STMT may be NULL. CODE is the level of the
231 warning--a warn_strict_overflow_code value. This function will use
232 the smaller of CODE and the deferred code when deciding whether to
233 issue the warning. CODE may be zero to mean to always use the
234 deferred code. */
235
236 void
237 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
238 {
239 const char *warnmsg;
240 location_t locus;
241
242 gcc_assert (fold_deferring_overflow_warnings > 0);
243 --fold_deferring_overflow_warnings;
244 if (fold_deferring_overflow_warnings > 0)
245 {
246 if (fold_deferred_overflow_warning != NULL
247 && code != 0
248 && code < (int) fold_deferred_overflow_code)
249 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
250 return;
251 }
252
253 warnmsg = fold_deferred_overflow_warning;
254 fold_deferred_overflow_warning = NULL;
255
256 if (!issue || warnmsg == NULL)
257 return;
258
259 if (gimple_no_warning_p (stmt))
260 return;
261
262 /* Use the smallest code level when deciding to issue the
263 warning. */
264 if (code == 0 || code > (int) fold_deferred_overflow_code)
265 code = fold_deferred_overflow_code;
266
267 if (!issue_strict_overflow_warning (code))
268 return;
269
270 if (stmt == NULL)
271 locus = input_location;
272 else
273 locus = gimple_location (stmt);
274 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
275 }
276
277 /* Stop deferring overflow warnings, ignoring any deferred
278 warnings. */
279
280 void
281 fold_undefer_and_ignore_overflow_warnings (void)
282 {
283 fold_undefer_overflow_warnings (false, NULL, 0);
284 }
285
286 /* Whether we are deferring overflow warnings. */
287
288 bool
289 fold_deferring_overflow_warnings_p (void)
290 {
291 return fold_deferring_overflow_warnings > 0;
292 }
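
/* Editor's sketch of the intended defer/undefer pairing (hypothetical
   caller, not part of the original source):

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     bool used = ...;    (hypothetical: whether RES is actually used)
     fold_undefer_overflow_warnings (used, stmt, 0);

   Any overflow warning triggered inside fold () is recorded rather than
   emitted, and passing 0 as CODE tells the undefer call to use the
   deferred warning's own level.  */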
293
294 /* This is called when we fold something based on the fact that signed
295 overflow is undefined. */
296
297 static void
298 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
299 {
300 if (fold_deferring_overflow_warnings > 0)
301 {
302 if (fold_deferred_overflow_warning == NULL
303 || wc < fold_deferred_overflow_code)
304 {
305 fold_deferred_overflow_warning = gmsgid;
306 fold_deferred_overflow_code = wc;
307 }
308 }
309 else if (issue_strict_overflow_warning (wc))
310 warning (OPT_Wstrict_overflow, gmsgid);
311 }
312 \f
313 /* Return true if the built-in mathematical function specified by CODE
314 is odd, i.e. -f(x) == f(-x). */
315
316 static bool
317 negate_mathfn_p (enum built_in_function code)
318 {
319 switch (code)
320 {
321 CASE_FLT_FN (BUILT_IN_ASIN):
322 CASE_FLT_FN (BUILT_IN_ASINH):
323 CASE_FLT_FN (BUILT_IN_ATAN):
324 CASE_FLT_FN (BUILT_IN_ATANH):
325 CASE_FLT_FN (BUILT_IN_CASIN):
326 CASE_FLT_FN (BUILT_IN_CASINH):
327 CASE_FLT_FN (BUILT_IN_CATAN):
328 CASE_FLT_FN (BUILT_IN_CATANH):
329 CASE_FLT_FN (BUILT_IN_CBRT):
330 CASE_FLT_FN (BUILT_IN_CPROJ):
331 CASE_FLT_FN (BUILT_IN_CSIN):
332 CASE_FLT_FN (BUILT_IN_CSINH):
333 CASE_FLT_FN (BUILT_IN_CTAN):
334 CASE_FLT_FN (BUILT_IN_CTANH):
335 CASE_FLT_FN (BUILT_IN_ERF):
336 CASE_FLT_FN (BUILT_IN_LLROUND):
337 CASE_FLT_FN (BUILT_IN_LROUND):
338 CASE_FLT_FN (BUILT_IN_ROUND):
339 CASE_FLT_FN (BUILT_IN_SIN):
340 CASE_FLT_FN (BUILT_IN_SINH):
341 CASE_FLT_FN (BUILT_IN_TAN):
342 CASE_FLT_FN (BUILT_IN_TANH):
343 CASE_FLT_FN (BUILT_IN_TRUNC):
344 return true;
345
346 CASE_FLT_FN (BUILT_IN_LLRINT):
347 CASE_FLT_FN (BUILT_IN_LRINT):
348 CASE_FLT_FN (BUILT_IN_NEARBYINT):
349 CASE_FLT_FN (BUILT_IN_RINT):
350 return !flag_rounding_math;
351
352 default:
353 break;
354 }
355 return false;
356 }
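
/* Editor's note (not part of the original source): the !flag_rounding_math
   guard above matters because the rint family honors the dynamic rounding
   mode.  Under round-toward-positive-infinity, for example,
   rint (-0.5) == -0.0 while -rint (0.5) == -1.0, so -f(x) == f(-x) fails
   and these functions may only be treated as odd when -frounding-math is
   not in effect.  */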
357
358 /* Check whether we may negate an integer constant T without causing
359 overflow. */
360
361 bool
362 may_negate_without_overflow_p (const_tree t)
363 {
364 tree type;
365
366 gcc_assert (TREE_CODE (t) == INTEGER_CST);
367
368 type = TREE_TYPE (t);
369 if (TYPE_UNSIGNED (type))
370 return false;
371
372 return !wi::only_sign_bit_p (t);
373 }
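
/* Editor's example (not part of the original source): for a 32-bit
   signed type, only INT_MIN (0x80000000, the value with just the sign
   bit set) fails this test, since -INT_MIN is not representable.  */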
374
375 /* Determine whether an expression T can be cheaply negated using
376 the function negate_expr without introducing undefined overflow. */
377
378 static bool
379 negate_expr_p (tree t)
380 {
381 tree type;
382
383 if (t == 0)
384 return false;
385
386 type = TREE_TYPE (t);
387
388 STRIP_SIGN_NOPS (t);
389 switch (TREE_CODE (t))
390 {
391 case INTEGER_CST:
392 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
393 return true;
394
395 /* Check that -CST will not overflow type. */
396 return may_negate_without_overflow_p (t);
397 case BIT_NOT_EXPR:
398 return (INTEGRAL_TYPE_P (type)
399 && TYPE_OVERFLOW_WRAPS (type));
400
401 case FIXED_CST:
402 return true;
403
404 case NEGATE_EXPR:
405 return !TYPE_OVERFLOW_SANITIZED (type);
406
407 case REAL_CST:
408 /* We want to canonicalize to positive real constants. Pretend
409 that only negative ones can be easily negated. */
410 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
411
412 case COMPLEX_CST:
413 return negate_expr_p (TREE_REALPART (t))
414 && negate_expr_p (TREE_IMAGPART (t));
415
416 case VECTOR_CST:
417 {
418 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
419 return true;
420
421 int count = TYPE_VECTOR_SUBPARTS (type), i;
422
423 for (i = 0; i < count; i++)
424 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
425 return false;
426
427 return true;
428 }
429
430 case COMPLEX_EXPR:
431 return negate_expr_p (TREE_OPERAND (t, 0))
432 && negate_expr_p (TREE_OPERAND (t, 1));
433
434 case CONJ_EXPR:
435 return negate_expr_p (TREE_OPERAND (t, 0));
436
437 case PLUS_EXPR:
438 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
439 || HONOR_SIGNED_ZEROS (element_mode (type)))
440 return false;
441 /* -(A + B) -> (-B) - A. */
442 if (negate_expr_p (TREE_OPERAND (t, 1))
443 && reorder_operands_p (TREE_OPERAND (t, 0),
444 TREE_OPERAND (t, 1)))
445 return true;
446 /* -(A + B) -> (-A) - B. */
447 return negate_expr_p (TREE_OPERAND (t, 0));
448
449 case MINUS_EXPR:
450 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
451 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
452 && !HONOR_SIGNED_ZEROS (element_mode (type))
453 && reorder_operands_p (TREE_OPERAND (t, 0),
454 TREE_OPERAND (t, 1));
455
456 case MULT_EXPR:
457 if (TYPE_UNSIGNED (TREE_TYPE (t)))
458 break;
459
460 /* Fall through. */
461
462 case RDIV_EXPR:
463 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
464 return negate_expr_p (TREE_OPERAND (t, 1))
465 || negate_expr_p (TREE_OPERAND (t, 0));
466 break;
467
468 case TRUNC_DIV_EXPR:
469 case ROUND_DIV_EXPR:
470 case EXACT_DIV_EXPR:
471 /* In general we can't negate A / B, because if A is INT_MIN and
472 B is 1, we may turn this into INT_MIN / -1 which is undefined
473 and actually traps on some architectures. But if overflow is
474 undefined, we can negate, because - (INT_MIN / 1) is an
475 overflow. */
476 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
477 {
478 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
479 break;
480 /* If overflow is undefined then we have to be careful because
481 we ask whether it's ok to associate the negate with the
482      division, which is not ok, for example, for
483 -((a - b) / c) where (-(a - b)) / c may invoke undefined
484 overflow because of negating INT_MIN. So do not use
485 negate_expr_p here but open-code the two important cases. */
486 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
487 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
488 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
489 return true;
490 }
491 else if (negate_expr_p (TREE_OPERAND (t, 0)))
492 return true;
493 return negate_expr_p (TREE_OPERAND (t, 1));
494
495 case NOP_EXPR:
496 /* Negate -((double)float) as (double)(-float). */
497 if (TREE_CODE (type) == REAL_TYPE)
498 {
499 tree tem = strip_float_extensions (t);
500 if (tem != t)
501 return negate_expr_p (tem);
502 }
503 break;
504
505 case CALL_EXPR:
506 /* Negate -f(x) as f(-x). */
507 if (negate_mathfn_p (builtin_mathfn_code (t)))
508 return negate_expr_p (CALL_EXPR_ARG (t, 0));
509 break;
510
511 case RSHIFT_EXPR:
512 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
513 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
514 {
515 tree op1 = TREE_OPERAND (t, 1);
516 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
517 return true;
518 }
519 break;
520
521 default:
522 break;
523 }
524 return false;
525 }
526
527 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
528 simplification is possible.
529 If negate_expr_p would return true for T, NULL_TREE will never be
530 returned. */
531
532 static tree
533 fold_negate_expr (location_t loc, tree t)
534 {
535 tree type = TREE_TYPE (t);
536 tree tem;
537
538 switch (TREE_CODE (t))
539 {
540 /* Convert - (~A) to A + 1. */
541 case BIT_NOT_EXPR:
542 if (INTEGRAL_TYPE_P (type))
543 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
544 build_one_cst (type));
545 break;
546
547 case INTEGER_CST:
548 tem = fold_negate_const (t, type);
549 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
550 || (ANY_INTEGRAL_TYPE_P (type)
551 && !TYPE_OVERFLOW_TRAPS (type)
552 && TYPE_OVERFLOW_WRAPS (type))
553 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
554 return tem;
555 break;
556
557 case REAL_CST:
558 tem = fold_negate_const (t, type);
559 return tem;
560
561 case FIXED_CST:
562 tem = fold_negate_const (t, type);
563 return tem;
564
565 case COMPLEX_CST:
566 {
567 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
568 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
569 if (rpart && ipart)
570 return build_complex (type, rpart, ipart);
571 }
572 break;
573
574 case VECTOR_CST:
575 {
576 int count = TYPE_VECTOR_SUBPARTS (type), i;
577 tree *elts = XALLOCAVEC (tree, count);
578
579 for (i = 0; i < count; i++)
580 {
581 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
582 if (elts[i] == NULL_TREE)
583 return NULL_TREE;
584 }
585
586 return build_vector (type, elts);
587 }
588
589 case COMPLEX_EXPR:
590 if (negate_expr_p (t))
591 return fold_build2_loc (loc, COMPLEX_EXPR, type,
592 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
593 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
594 break;
595
596 case CONJ_EXPR:
597 if (negate_expr_p (t))
598 return fold_build1_loc (loc, CONJ_EXPR, type,
599 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
600 break;
601
602 case NEGATE_EXPR:
603 if (!TYPE_OVERFLOW_SANITIZED (type))
604 return TREE_OPERAND (t, 0);
605 break;
606
607 case PLUS_EXPR:
608 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
609 && !HONOR_SIGNED_ZEROS (element_mode (type)))
610 {
611 /* -(A + B) -> (-B) - A. */
612 if (negate_expr_p (TREE_OPERAND (t, 1))
613 && reorder_operands_p (TREE_OPERAND (t, 0),
614 TREE_OPERAND (t, 1)))
615 {
616 tem = negate_expr (TREE_OPERAND (t, 1));
617 return fold_build2_loc (loc, MINUS_EXPR, type,
618 tem, TREE_OPERAND (t, 0));
619 }
620
621 /* -(A + B) -> (-A) - B. */
622 if (negate_expr_p (TREE_OPERAND (t, 0)))
623 {
624 tem = negate_expr (TREE_OPERAND (t, 0));
625 return fold_build2_loc (loc, MINUS_EXPR, type,
626 tem, TREE_OPERAND (t, 1));
627 }
628 }
629 break;
630
631 case MINUS_EXPR:
632 /* - (A - B) -> B - A */
633 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
634 && !HONOR_SIGNED_ZEROS (element_mode (type))
635 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
636 return fold_build2_loc (loc, MINUS_EXPR, type,
637 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
638 break;
639
640 case MULT_EXPR:
641 if (TYPE_UNSIGNED (type))
642 break;
643
644 /* Fall through. */
645
646 case RDIV_EXPR:
647 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
648 {
649 tem = TREE_OPERAND (t, 1);
650 if (negate_expr_p (tem))
651 return fold_build2_loc (loc, TREE_CODE (t), type,
652 TREE_OPERAND (t, 0), negate_expr (tem));
653 tem = TREE_OPERAND (t, 0);
654 if (negate_expr_p (tem))
655 return fold_build2_loc (loc, TREE_CODE (t), type,
656 negate_expr (tem), TREE_OPERAND (t, 1));
657 }
658 break;
659
660 case TRUNC_DIV_EXPR:
661 case ROUND_DIV_EXPR:
662 case EXACT_DIV_EXPR:
663 /* In general we can't negate A / B, because if A is INT_MIN and
664 B is 1, we may turn this into INT_MIN / -1 which is undefined
665 and actually traps on some architectures. But if overflow is
666 undefined, we can negate, because - (INT_MIN / 1) is an
667 overflow. */
668 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
669 {
670 const char * const warnmsg = G_("assuming signed overflow does not "
671 "occur when negating a division");
672 tem = TREE_OPERAND (t, 1);
673 if (negate_expr_p (tem))
674 {
675 if (INTEGRAL_TYPE_P (type)
676 && (TREE_CODE (tem) != INTEGER_CST
677 || integer_onep (tem)))
678 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
679 return fold_build2_loc (loc, TREE_CODE (t), type,
680 TREE_OPERAND (t, 0), negate_expr (tem));
681 }
682 /* If overflow is undefined then we have to be careful because
683 we ask whether it's ok to associate the negate with the
684          division, which is not ok, for example, for
685 -((a - b) / c) where (-(a - b)) / c may invoke undefined
686 overflow because of negating INT_MIN. So do not use
687 negate_expr_p here but open-code the two important cases. */
688 tem = TREE_OPERAND (t, 0);
689 if ((INTEGRAL_TYPE_P (type)
690 && (TREE_CODE (tem) == NEGATE_EXPR
691 || (TREE_CODE (tem) == INTEGER_CST
692 && may_negate_without_overflow_p (tem))))
693 || !INTEGRAL_TYPE_P (type))
694 return fold_build2_loc (loc, TREE_CODE (t), type,
695 negate_expr (tem), TREE_OPERAND (t, 1));
696 }
697 break;
698
699 case NOP_EXPR:
700 /* Convert -((double)float) into (double)(-float). */
701 if (TREE_CODE (type) == REAL_TYPE)
702 {
703 tem = strip_float_extensions (t);
704 if (tem != t && negate_expr_p (tem))
705 return fold_convert_loc (loc, type, negate_expr (tem));
706 }
707 break;
708
709 case CALL_EXPR:
710 /* Negate -f(x) as f(-x). */
711 if (negate_mathfn_p (builtin_mathfn_code (t))
712 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
713 {
714 tree fndecl, arg;
715
716 fndecl = get_callee_fndecl (t);
717 arg = negate_expr (CALL_EXPR_ARG (t, 0));
718 return build_call_expr_loc (loc, fndecl, 1, arg);
719 }
720 break;
721
722 case RSHIFT_EXPR:
723 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
724 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
725 {
726 tree op1 = TREE_OPERAND (t, 1);
727 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
728 {
729 tree ntype = TYPE_UNSIGNED (type)
730 ? signed_type_for (type)
731 : unsigned_type_for (type);
732 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
733 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
734 return fold_convert_loc (loc, type, temp);
735 }
736 }
737 break;
738
739 default:
740 break;
741 }
742
743 return NULL_TREE;
744 }
745
746 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
747    negated in a simpler way.  Also allow T to be NULL_TREE, in which case
748    NULL_TREE is returned.  */
749
750 static tree
751 negate_expr (tree t)
752 {
753 tree type, tem;
754 location_t loc;
755
756 if (t == NULL_TREE)
757 return NULL_TREE;
758
759 loc = EXPR_LOCATION (t);
760 type = TREE_TYPE (t);
761 STRIP_SIGN_NOPS (t);
762
763 tem = fold_negate_expr (loc, t);
764 if (!tem)
765 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
766 return fold_convert_loc (loc, type, tem);
767 }
768 \f
769 /* Split a tree IN into a constant, literal and variable parts that could be
770 combined with CODE to make IN. "constant" means an expression with
771 TREE_CONSTANT but that isn't an actual constant. CODE must be a
772 commutative arithmetic operation. Store the constant part into *CONP,
773 the literal in *LITP and return the variable part. If a part isn't
774 present, set it to null. If the tree does not decompose in this way,
775 return the entire tree as the variable part and the other parts as null.
776
777    If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
778    case, we negate an operand that was subtracted, except if it is a
779    literal, for which we use *MINUS_LITP instead.
780
781 If NEGATE_P is true, we are negating all of IN, again except a literal
782 for which we use *MINUS_LITP instead.
783
784 If IN is itself a literal or constant, return it as appropriate.
785
786 Note that we do not guarantee that any of the three values will be the
787 same type as IN, but they will have the same signedness and mode. */
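
/* Editor's example (not part of the original source): with CODE ==
   PLUS_EXPR and IN == `p + 8' where `p' is TREE_CONSTANT but not a
   literal, split_tree returns a null variable part, sets *CONP to `p'
   and *LITP to 8.  For IN == `x - 5', the subtracted literal goes to
   *MINUS_LITP: the return value is `x', *LITP is null and *MINUS_LITP
   is 5.  */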
788
789 static tree
790 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
791 tree *minus_litp, int negate_p)
792 {
793 tree var = 0;
794
795 *conp = 0;
796 *litp = 0;
797 *minus_litp = 0;
798
799 /* Strip any conversions that don't change the machine mode or signedness. */
800 STRIP_SIGN_NOPS (in);
801
802 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
803 || TREE_CODE (in) == FIXED_CST)
804 *litp = in;
805 else if (TREE_CODE (in) == code
806 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
807 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
808 /* We can associate addition and subtraction together (even
809 though the C standard doesn't say so) for integers because
810 the value is not affected. For reals, the value might be
811 affected, so we can't. */
812 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
813 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
814 {
815 tree op0 = TREE_OPERAND (in, 0);
816 tree op1 = TREE_OPERAND (in, 1);
817 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
818 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
819
820 /* First see if either of the operands is a literal, then a constant. */
821 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
822 || TREE_CODE (op0) == FIXED_CST)
823 *litp = op0, op0 = 0;
824 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
825 || TREE_CODE (op1) == FIXED_CST)
826 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
827
828 if (op0 != 0 && TREE_CONSTANT (op0))
829 *conp = op0, op0 = 0;
830 else if (op1 != 0 && TREE_CONSTANT (op1))
831 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
832
833 /* If we haven't dealt with either operand, this is not a case we can
834 decompose. Otherwise, VAR is either of the ones remaining, if any. */
835 if (op0 != 0 && op1 != 0)
836 var = in;
837 else if (op0 != 0)
838 var = op0;
839 else
840 var = op1, neg_var_p = neg1_p;
841
842 /* Now do any needed negations. */
843 if (neg_litp_p)
844 *minus_litp = *litp, *litp = 0;
845 if (neg_conp_p)
846 *conp = negate_expr (*conp);
847 if (neg_var_p)
848 var = negate_expr (var);
849 }
850 else if (TREE_CODE (in) == BIT_NOT_EXPR
851 && code == PLUS_EXPR)
852 {
853 /* -X - 1 is folded to ~X, undo that here. */
854 *minus_litp = build_one_cst (TREE_TYPE (in));
855 var = negate_expr (TREE_OPERAND (in, 0));
856 }
857 else if (TREE_CONSTANT (in))
858 *conp = in;
859 else
860 var = in;
861
862 if (negate_p)
863 {
864 if (*litp)
865 *minus_litp = *litp, *litp = 0;
866 else if (*minus_litp)
867 *litp = *minus_litp, *minus_litp = 0;
868 *conp = negate_expr (*conp);
869 var = negate_expr (var);
870 }
871
872 return var;
873 }
874
875 /* Re-associate trees split by the above function. T1 and T2 are
876 either expressions to associate or null. Return the new
877 expression, if any. LOC is the location of the new expression. If
878 we build an operation, do it in TYPE and with CODE. */
879
880 static tree
881 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
882 {
883 if (t1 == 0)
884 return t2;
885 else if (t2 == 0)
886 return t1;
887
888 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
889 try to fold this since we will have infinite recursion. But do
890 deal with any NEGATE_EXPRs. */
891 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
892 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
893 {
894 if (code == PLUS_EXPR)
895 {
896 if (TREE_CODE (t1) == NEGATE_EXPR)
897 return build2_loc (loc, MINUS_EXPR, type,
898 fold_convert_loc (loc, type, t2),
899 fold_convert_loc (loc, type,
900 TREE_OPERAND (t1, 0)));
901 else if (TREE_CODE (t2) == NEGATE_EXPR)
902 return build2_loc (loc, MINUS_EXPR, type,
903 fold_convert_loc (loc, type, t1),
904 fold_convert_loc (loc, type,
905 TREE_OPERAND (t2, 0)));
906 else if (integer_zerop (t2))
907 return fold_convert_loc (loc, type, t1);
908 }
909 else if (code == MINUS_EXPR)
910 {
911 if (integer_zerop (t2))
912 return fold_convert_loc (loc, type, t1);
913 }
914
915 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
916 fold_convert_loc (loc, type, t2));
917 }
918
919 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
920 fold_convert_loc (loc, type, t2));
921 }
922 \f
923 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
924 for use in int_const_binop, size_binop and size_diffop. */
925
926 static bool
927 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
928 {
929 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
930 return false;
931 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
932 return false;
933
934 switch (code)
935 {
936 case LSHIFT_EXPR:
937 case RSHIFT_EXPR:
938 case LROTATE_EXPR:
939 case RROTATE_EXPR:
940 return true;
941
942 default:
943 break;
944 }
945
946 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
947 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
948 && TYPE_MODE (type1) == TYPE_MODE (type2);
949 }
950
951
952 /* Combine two integer constants ARG1 and ARG2 under operation CODE
953 to produce a new constant. Return NULL_TREE if we don't know how
954 to evaluate CODE at compile-time. */
955
956 static tree
957 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
958 int overflowable)
959 {
960 wide_int res;
961 tree t;
962 tree type = TREE_TYPE (arg1);
963 signop sign = TYPE_SIGN (type);
964 bool overflow = false;
965
966 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
967 TYPE_SIGN (TREE_TYPE (parg2)));
968
969 switch (code)
970 {
971 case BIT_IOR_EXPR:
972 res = wi::bit_or (arg1, arg2);
973 break;
974
975 case BIT_XOR_EXPR:
976 res = wi::bit_xor (arg1, arg2);
977 break;
978
979 case BIT_AND_EXPR:
980 res = wi::bit_and (arg1, arg2);
981 break;
982
983 case RSHIFT_EXPR:
984 case LSHIFT_EXPR:
985 if (wi::neg_p (arg2))
986 {
987 arg2 = -arg2;
988 if (code == RSHIFT_EXPR)
989 code = LSHIFT_EXPR;
990 else
991 code = RSHIFT_EXPR;
992 }
993
994 if (code == RSHIFT_EXPR)
995 /* It's unclear from the C standard whether shifts can overflow.
996 The following code ignores overflow; perhaps a C standard
997 interpretation ruling is needed. */
998 res = wi::rshift (arg1, arg2, sign);
999 else
1000 res = wi::lshift (arg1, arg2);
1001 break;
1002
1003 case RROTATE_EXPR:
1004 case LROTATE_EXPR:
1005 if (wi::neg_p (arg2))
1006 {
1007 arg2 = -arg2;
1008 if (code == RROTATE_EXPR)
1009 code = LROTATE_EXPR;
1010 else
1011 code = RROTATE_EXPR;
1012 }
1013
1014 if (code == RROTATE_EXPR)
1015 res = wi::rrotate (arg1, arg2);
1016 else
1017 res = wi::lrotate (arg1, arg2);
1018 break;
1019
1020 case PLUS_EXPR:
1021 res = wi::add (arg1, arg2, sign, &overflow);
1022 break;
1023
1024 case MINUS_EXPR:
1025 res = wi::sub (arg1, arg2, sign, &overflow);
1026 break;
1027
1028 case MULT_EXPR:
1029 res = wi::mul (arg1, arg2, sign, &overflow);
1030 break;
1031
1032 case MULT_HIGHPART_EXPR:
1033 res = wi::mul_high (arg1, arg2, sign);
1034 break;
1035
1036 case TRUNC_DIV_EXPR:
1037 case EXACT_DIV_EXPR:
1038 if (arg2 == 0)
1039 return NULL_TREE;
1040 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1041 break;
1042
1043 case FLOOR_DIV_EXPR:
1044 if (arg2 == 0)
1045 return NULL_TREE;
1046 res = wi::div_floor (arg1, arg2, sign, &overflow);
1047 break;
1048
1049 case CEIL_DIV_EXPR:
1050 if (arg2 == 0)
1051 return NULL_TREE;
1052 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1053 break;
1054
1055 case ROUND_DIV_EXPR:
1056 if (arg2 == 0)
1057 return NULL_TREE;
1058 res = wi::div_round (arg1, arg2, sign, &overflow);
1059 break;
1060
1061 case TRUNC_MOD_EXPR:
1062 if (arg2 == 0)
1063 return NULL_TREE;
1064 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1065 break;
1066
1067 case FLOOR_MOD_EXPR:
1068 if (arg2 == 0)
1069 return NULL_TREE;
1070 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1071 break;
1072
1073 case CEIL_MOD_EXPR:
1074 if (arg2 == 0)
1075 return NULL_TREE;
1076 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1077 break;
1078
1079 case ROUND_MOD_EXPR:
1080 if (arg2 == 0)
1081 return NULL_TREE;
1082 res = wi::mod_round (arg1, arg2, sign, &overflow);
1083 break;
1084
1085 case MIN_EXPR:
1086 res = wi::min (arg1, arg2, sign);
1087 break;
1088
1089 case MAX_EXPR:
1090 res = wi::max (arg1, arg2, sign);
1091 break;
1092
1093 default:
1094 return NULL_TREE;
1095 }
1096
1097 t = force_fit_type (type, res, overflowable,
1098 (((sign == SIGNED || overflowable == -1)
1099 && overflow)
1100 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1101
1102 return t;
1103 }
1104
1105 tree
1106 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1107 {
1108 return int_const_binop_1 (code, arg1, arg2, 1);
1109 }
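
/* Editor's sketch of a hypothetical caller (not part of the original
   source):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = int_const_binop (PLUS_EXPR, two, three);
     tree none = int_const_binop (TRUNC_DIV_EXPR, two,
                                  build_int_cst (integer_type_node, 0));

   FIVE is the INTEGER_CST 5; NONE is NULL_TREE, since division by zero
   is one of the cases int_const_binop_1 refuses to evaluate.  */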
1110
1111 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1112 constant. We assume ARG1 and ARG2 have the same data type, or at least
1113 are the same kind of constant and the same machine mode. Return zero if
1114 combining the constants is not allowed in the current operating mode. */
1115
1116 static tree
1117 const_binop (enum tree_code code, tree arg1, tree arg2)
1118 {
1119 /* Sanity check for the recursive cases. */
1120 if (!arg1 || !arg2)
1121 return NULL_TREE;
1122
1123 STRIP_NOPS (arg1);
1124 STRIP_NOPS (arg2);
1125
1126 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1127 {
1128 if (code == POINTER_PLUS_EXPR)
1129 return int_const_binop (PLUS_EXPR,
1130 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1131
1132 return int_const_binop (code, arg1, arg2);
1133 }
1134
1135 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1136 {
1137 machine_mode mode;
1138 REAL_VALUE_TYPE d1;
1139 REAL_VALUE_TYPE d2;
1140 REAL_VALUE_TYPE value;
1141 REAL_VALUE_TYPE result;
1142 bool inexact;
1143 tree t, type;
1144
1145 /* The following codes are handled by real_arithmetic. */
1146 switch (code)
1147 {
1148 case PLUS_EXPR:
1149 case MINUS_EXPR:
1150 case MULT_EXPR:
1151 case RDIV_EXPR:
1152 case MIN_EXPR:
1153 case MAX_EXPR:
1154 break;
1155
1156 default:
1157 return NULL_TREE;
1158 }
1159
1160 d1 = TREE_REAL_CST (arg1);
1161 d2 = TREE_REAL_CST (arg2);
1162
1163 type = TREE_TYPE (arg1);
1164 mode = TYPE_MODE (type);
1165
1166       /* Don't perform the operation if we honor signaling NaNs and
1167 either operand is a NaN. */
1168 if (HONOR_SNANS (mode)
1169 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1170 return NULL_TREE;
1171
1172       /* Don't perform the operation if it would raise a division
1173          by zero exception.  */
1174 if (code == RDIV_EXPR
1175 && REAL_VALUES_EQUAL (d2, dconst0)
1176 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1177 return NULL_TREE;
1178
1179 /* If either operand is a NaN, just return it. Otherwise, set up
1180 for floating-point trap; we return an overflow. */
1181 if (REAL_VALUE_ISNAN (d1))
1182 return arg1;
1183 else if (REAL_VALUE_ISNAN (d2))
1184 return arg2;
1185
1186 inexact = real_arithmetic (&value, code, &d1, &d2);
1187 real_convert (&result, mode, &value);
1188
1189       /* Don't constant fold this floating point operation if
1190          the result has overflowed and flag_trapping_math is set.  */
1191 if (flag_trapping_math
1192 && MODE_HAS_INFINITIES (mode)
1193 && REAL_VALUE_ISINF (result)
1194 && !REAL_VALUE_ISINF (d1)
1195 && !REAL_VALUE_ISINF (d2))
1196 return NULL_TREE;
1197
1198 /* Don't constant fold this floating point operation if the
1199          result may depend upon the run-time rounding mode and
1200 flag_rounding_math is set, or if GCC's software emulation
1201 is unable to accurately represent the result. */
1202 if ((flag_rounding_math
1203 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1204 && (inexact || !real_identical (&result, &value)))
1205 return NULL_TREE;
1206
1207 t = build_real (type, result);
1208
1209 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1210 return t;
1211 }
1212
1213 if (TREE_CODE (arg1) == FIXED_CST)
1214 {
1215 FIXED_VALUE_TYPE f1;
1216 FIXED_VALUE_TYPE f2;
1217 FIXED_VALUE_TYPE result;
1218 tree t, type;
1219 int sat_p;
1220 bool overflow_p;
1221
1222 /* The following codes are handled by fixed_arithmetic. */
1223 switch (code)
1224 {
1225 case PLUS_EXPR:
1226 case MINUS_EXPR:
1227 case MULT_EXPR:
1228 case TRUNC_DIV_EXPR:
1229 if (TREE_CODE (arg2) != FIXED_CST)
1230 return NULL_TREE;
1231 f2 = TREE_FIXED_CST (arg2);
1232 break;
1233
1234 case LSHIFT_EXPR:
1235 case RSHIFT_EXPR:
1236 {
1237 if (TREE_CODE (arg2) != INTEGER_CST)
1238 return NULL_TREE;
1239 wide_int w2 = arg2;
1240 f2.data.high = w2.elt (1);
1241 f2.data.low = w2.elt (0);
1242 f2.mode = SImode;
1243 }
1244 break;
1245
1246 default:
1247 return NULL_TREE;
1248 }
1249
1250 f1 = TREE_FIXED_CST (arg1);
1251 type = TREE_TYPE (arg1);
1252 sat_p = TYPE_SATURATING (type);
1253 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1254 t = build_fixed (type, result);
1255 /* Propagate overflow flags. */
1256 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1257 TREE_OVERFLOW (t) = 1;
1258 return t;
1259 }
1260
1261 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1262 {
1263 tree type = TREE_TYPE (arg1);
1264 tree r1 = TREE_REALPART (arg1);
1265 tree i1 = TREE_IMAGPART (arg1);
1266 tree r2 = TREE_REALPART (arg2);
1267 tree i2 = TREE_IMAGPART (arg2);
1268 tree real, imag;
1269
1270 switch (code)
1271 {
1272 case PLUS_EXPR:
1273 case MINUS_EXPR:
1274 real = const_binop (code, r1, r2);
1275 imag = const_binop (code, i1, i2);
1276 break;
1277
1278 case MULT_EXPR:
1279 if (COMPLEX_FLOAT_TYPE_P (type))
1280 return do_mpc_arg2 (arg1, arg2, type,
1281 /* do_nonfinite= */ folding_initializer,
1282 mpc_mul);
1283
1284 real = const_binop (MINUS_EXPR,
1285 const_binop (MULT_EXPR, r1, r2),
1286 const_binop (MULT_EXPR, i1, i2));
1287 imag = const_binop (PLUS_EXPR,
1288 const_binop (MULT_EXPR, r1, i2),
1289 const_binop (MULT_EXPR, i1, r2));
1290 break;
1291
1292 case RDIV_EXPR:
1293 if (COMPLEX_FLOAT_TYPE_P (type))
1294 return do_mpc_arg2 (arg1, arg2, type,
1295 /* do_nonfinite= */ folding_initializer,
1296 mpc_div);
1297 /* Fallthru ... */
1298 case TRUNC_DIV_EXPR:
1299 case CEIL_DIV_EXPR:
1300 case FLOOR_DIV_EXPR:
1301 case ROUND_DIV_EXPR:
1302 if (flag_complex_method == 0)
1303 {
1304 /* Keep this algorithm in sync with
1305 tree-complex.c:expand_complex_div_straight().
1306
1307 Expand complex division to scalars, straightforward algorithm.
1308 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1309 t = br*br + bi*bi
1310 */
1311 tree magsquared
1312 = const_binop (PLUS_EXPR,
1313 const_binop (MULT_EXPR, r2, r2),
1314 const_binop (MULT_EXPR, i2, i2));
1315 tree t1
1316 = const_binop (PLUS_EXPR,
1317 const_binop (MULT_EXPR, r1, r2),
1318 const_binop (MULT_EXPR, i1, i2));
1319 tree t2
1320 = const_binop (MINUS_EXPR,
1321 const_binop (MULT_EXPR, i1, r2),
1322 const_binop (MULT_EXPR, r1, i2));
1323
1324 real = const_binop (code, t1, magsquared);
1325 imag = const_binop (code, t2, magsquared);
1326 }
1327 else
1328 {
1329 /* Keep this algorithm in sync with
1330 tree-complex.c:expand_complex_div_wide().
1331
1332 Expand complex division to scalars, modified algorithm to minimize
1333 overflow with wide input ranges. */
1334 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1335 fold_abs_const (r2, TREE_TYPE (type)),
1336 fold_abs_const (i2, TREE_TYPE (type)));
1337
1338 if (integer_nonzerop (compare))
1339 {
1340 /* In the TRUE branch, we compute
1341 ratio = br/bi;
1342 div = (br * ratio) + bi;
1343 tr = (ar * ratio) + ai;
1344 ti = (ai * ratio) - ar;
1345 tr = tr / div;
1346 ti = ti / div; */
1347 tree ratio = const_binop (code, r2, i2);
1348 tree div = const_binop (PLUS_EXPR, i2,
1349 const_binop (MULT_EXPR, r2, ratio));
1350 real = const_binop (MULT_EXPR, r1, ratio);
1351 real = const_binop (PLUS_EXPR, real, i1);
1352 real = const_binop (code, real, div);
1353
1354 imag = const_binop (MULT_EXPR, i1, ratio);
1355 imag = const_binop (MINUS_EXPR, imag, r1);
1356 imag = const_binop (code, imag, div);
1357 }
1358 else
1359 {
1360              /* In the FALSE branch, we compute
1361                  ratio = bi/br;
1362                  div = (bi * ratio) + br;
1363                  tr = (ai * ratio) + ar;
1364                  ti = ai - (ar * ratio);
1365                  tr = tr / div;
1366                  ti = ti / div;  */
1367 tree ratio = const_binop (code, i2, r2);
1368 tree div = const_binop (PLUS_EXPR, r2,
1369 const_binop (MULT_EXPR, i2, ratio));
1370
1371 real = const_binop (MULT_EXPR, i1, ratio);
1372 real = const_binop (PLUS_EXPR, real, r1);
1373 real = const_binop (code, real, div);
1374
1375 imag = const_binop (MULT_EXPR, r1, ratio);
1376 imag = const_binop (MINUS_EXPR, i1, imag);
1377 imag = const_binop (code, imag, div);
1378 }
1379 }
1380 break;
1381
1382 default:
1383 return NULL_TREE;
1384 }
1385
1386 if (real && imag)
1387 return build_complex (type, real, imag);
1388 }
1389
1390 if (TREE_CODE (arg1) == VECTOR_CST
1391 && TREE_CODE (arg2) == VECTOR_CST)
1392 {
1393 tree type = TREE_TYPE (arg1);
1394 int count = TYPE_VECTOR_SUBPARTS (type), i;
1395 tree *elts = XALLOCAVEC (tree, count);
1396
1397 for (i = 0; i < count; i++)
1398 {
1399 tree elem1 = VECTOR_CST_ELT (arg1, i);
1400 tree elem2 = VECTOR_CST_ELT (arg2, i);
1401
1402 elts[i] = const_binop (code, elem1, elem2);
1403
1404           /* const_binop may not handle the given code, in which
1405              case it returns NULL_TREE.  */
1406 if (elts[i] == NULL_TREE)
1407 return NULL_TREE;
1408 }
1409
1410 return build_vector (type, elts);
1411 }
1412
1413   /* Shifts allow a scalar shift amount for a vector.  */
1414 if (TREE_CODE (arg1) == VECTOR_CST
1415 && TREE_CODE (arg2) == INTEGER_CST)
1416 {
1417 tree type = TREE_TYPE (arg1);
1418 int count = TYPE_VECTOR_SUBPARTS (type), i;
1419 tree *elts = XALLOCAVEC (tree, count);
1420
1421 for (i = 0; i < count; i++)
1422 {
1423 tree elem1 = VECTOR_CST_ELT (arg1, i);
1424
1425 elts[i] = const_binop (code, elem1, arg2);
1426
1427           /* const_binop may not handle the given code, in which
1428              case it returns NULL_TREE.  */
1429 if (elts[i] == NULL_TREE)
1430 return NULL_TREE;
1431 }
1432
1433 return build_vector (type, elts);
1434 }
1435 return NULL_TREE;
1436 }
1437
1438 /* Overload that adds a TYPE parameter to be able to dispatch
1439 to fold_relational_const. */
1440
1441 tree
1442 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1443 {
1444 if (TREE_CODE_CLASS (code) == tcc_comparison)
1445 return fold_relational_const (code, type, arg1, arg2);
1446
1447   /* ??? Until we make the const_binop worker take the type of the
1448      result as an argument, put those cases that need it here.  */
1449 switch (code)
1450 {
1451 case COMPLEX_EXPR:
1452 if ((TREE_CODE (arg1) == REAL_CST
1453 && TREE_CODE (arg2) == REAL_CST)
1454 || (TREE_CODE (arg1) == INTEGER_CST
1455 && TREE_CODE (arg2) == INTEGER_CST))
1456 return build_complex (type, arg1, arg2);
1457 return NULL_TREE;
1458
1459 case VEC_PACK_TRUNC_EXPR:
1460 case VEC_PACK_FIX_TRUNC_EXPR:
1461 {
1462 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1463 tree *elts;
1464
1465 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1466 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1467 if (TREE_CODE (arg1) != VECTOR_CST
1468 || TREE_CODE (arg2) != VECTOR_CST)
1469 return NULL_TREE;
1470
1471 elts = XALLOCAVEC (tree, nelts);
1472 if (!vec_cst_ctor_to_array (arg1, elts)
1473 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1474 return NULL_TREE;
1475
1476 for (i = 0; i < nelts; i++)
1477 {
1478 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1479 ? NOP_EXPR : FIX_TRUNC_EXPR,
1480 TREE_TYPE (type), elts[i]);
1481 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1482 return NULL_TREE;
1483 }
1484
1485 return build_vector (type, elts);
1486 }
1487
1488 case VEC_WIDEN_MULT_LO_EXPR:
1489 case VEC_WIDEN_MULT_HI_EXPR:
1490 case VEC_WIDEN_MULT_EVEN_EXPR:
1491 case VEC_WIDEN_MULT_ODD_EXPR:
1492 {
1493 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1494 unsigned int out, ofs, scale;
1495 tree *elts;
1496
1497 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1498 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1499 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1500 return NULL_TREE;
1501
1502 elts = XALLOCAVEC (tree, nelts * 4);
1503 if (!vec_cst_ctor_to_array (arg1, elts)
1504 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1505 return NULL_TREE;
1506
1507 if (code == VEC_WIDEN_MULT_LO_EXPR)
1508 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1509 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1510 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1511 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1512 scale = 1, ofs = 0;
1513 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1514 scale = 1, ofs = 1;
1515
1516 for (out = 0; out < nelts; out++)
1517 {
1518 unsigned int in1 = (out << scale) + ofs;
1519 unsigned int in2 = in1 + nelts * 2;
1520 tree t1, t2;
1521
1522 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1523 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1524
1525 if (t1 == NULL_TREE || t2 == NULL_TREE)
1526 return NULL_TREE;
1527 elts[out] = const_binop (MULT_EXPR, t1, t2);
1528 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1529 return NULL_TREE;
1530 }
1531
1532 return build_vector (type, elts);
1533 }
1534
1535 default:;
1536 }
1537
1538 if (TREE_CODE_CLASS (code) != tcc_binary)
1539 return NULL_TREE;
1540
1541 /* Make sure type and arg0 have the same saturating flag. */
1542 gcc_checking_assert (TYPE_SATURATING (type)
1543 == TYPE_SATURATING (TREE_TYPE (arg1)));
1544
1545 return const_binop (code, arg1, arg2);
1546 }
1547
1548 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1549 Return zero if computing the constants is not possible. */
1550
1551 tree
1552 const_unop (enum tree_code code, tree type, tree arg0)
1553 {
1554 switch (code)
1555 {
1556 CASE_CONVERT:
1557 case FLOAT_EXPR:
1558 case FIX_TRUNC_EXPR:
1559 case FIXED_CONVERT_EXPR:
1560 return fold_convert_const (code, type, arg0);
1561
1562 case ADDR_SPACE_CONVERT_EXPR:
1563 if (integer_zerop (arg0))
1564 return fold_convert_const (code, type, arg0);
1565 break;
1566
1567 case VIEW_CONVERT_EXPR:
1568 return fold_view_convert_expr (type, arg0);
1569
1570 case NEGATE_EXPR:
1571 {
1572 /* Can't call fold_negate_const directly here as that doesn't
1573 handle all cases and we might not be able to negate some
1574 constants. */
1575 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1576 if (tem && CONSTANT_CLASS_P (tem))
1577 return tem;
1578 break;
1579 }
1580
1581 case ABS_EXPR:
1582 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1583 return fold_abs_const (arg0, type);
1584 break;
1585
1586 case CONJ_EXPR:
1587 if (TREE_CODE (arg0) == COMPLEX_CST)
1588 {
1589 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1590 TREE_TYPE (type));
1591 return build_complex (type, TREE_REALPART (arg0), ipart);
1592 }
1593 break;
1594
1595 case BIT_NOT_EXPR:
1596 if (TREE_CODE (arg0) == INTEGER_CST)
1597 return fold_not_const (arg0, type);
1598 /* Perform BIT_NOT_EXPR on each element individually. */
1599 else if (TREE_CODE (arg0) == VECTOR_CST)
1600 {
1601 tree *elements;
1602 tree elem;
1603 unsigned count = VECTOR_CST_NELTS (arg0), i;
1604
1605 elements = XALLOCAVEC (tree, count);
1606 for (i = 0; i < count; i++)
1607 {
1608 elem = VECTOR_CST_ELT (arg0, i);
1609 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1610 if (elem == NULL_TREE)
1611 break;
1612 elements[i] = elem;
1613 }
1614 if (i == count)
1615 return build_vector (type, elements);
1616 }
1617 break;
1618
1619 case TRUTH_NOT_EXPR:
1620 if (TREE_CODE (arg0) == INTEGER_CST)
1621 return constant_boolean_node (integer_zerop (arg0), type);
1622 break;
1623
1624 case REALPART_EXPR:
1625 if (TREE_CODE (arg0) == COMPLEX_CST)
1626 return fold_convert (type, TREE_REALPART (arg0));
1627 break;
1628
1629 case IMAGPART_EXPR:
1630 if (TREE_CODE (arg0) == COMPLEX_CST)
1631 return fold_convert (type, TREE_IMAGPART (arg0));
1632 break;
1633
1634 case VEC_UNPACK_LO_EXPR:
1635 case VEC_UNPACK_HI_EXPR:
1636 case VEC_UNPACK_FLOAT_LO_EXPR:
1637 case VEC_UNPACK_FLOAT_HI_EXPR:
1638 {
1639 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1640 tree *elts;
1641 enum tree_code subcode;
1642
1643 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1644 if (TREE_CODE (arg0) != VECTOR_CST)
1645 return NULL_TREE;
1646
1647 elts = XALLOCAVEC (tree, nelts * 2);
1648 if (!vec_cst_ctor_to_array (arg0, elts))
1649 return NULL_TREE;
1650
1651 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1652 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1653 elts += nelts;
1654
1655 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1656 subcode = NOP_EXPR;
1657 else
1658 subcode = FLOAT_EXPR;
1659
1660 for (i = 0; i < nelts; i++)
1661 {
1662 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1663 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1664 return NULL_TREE;
1665 }
1666
1667 return build_vector (type, elts);
1668 }
1669
1670 case REDUC_MIN_EXPR:
1671 case REDUC_MAX_EXPR:
1672 case REDUC_PLUS_EXPR:
1673 {
1674 unsigned int nelts, i;
1675 tree *elts;
1676 enum tree_code subcode;
1677
1678 if (TREE_CODE (arg0) != VECTOR_CST)
1679 return NULL_TREE;
1680 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1681
1682 elts = XALLOCAVEC (tree, nelts);
1683 if (!vec_cst_ctor_to_array (arg0, elts))
1684 return NULL_TREE;
1685
1686 switch (code)
1687 {
1688 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1689 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1690 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1691 default: gcc_unreachable ();
1692 }
1693
1694 for (i = 1; i < nelts; i++)
1695 {
1696 elts[0] = const_binop (subcode, elts[0], elts[i]);
1697 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1698 return NULL_TREE;
1699 }
1700
1701 return elts[0];
1702 }
1703
1704 default:
1705 break;
1706 }
1707
1708 return NULL_TREE;
1709 }
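
/* Editor's sketch of a hypothetical caller (not part of the original
   source):

     tree m5 = const_unop (NEGATE_EXPR, integer_type_node,
                           build_int_cst (integer_type_node, 5));

   M5 is the INTEGER_CST -5, produced via the fold_negate_expr path
   above.  */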
1710
1711 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1712 indicates which particular sizetype to create. */
1713
1714 tree
1715 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1716 {
1717 return build_int_cst (sizetype_tab[(int) kind], number);
1718 }
1719 \f
1720 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1721 is a tree code. The type of the result is taken from the operands.
1722 Both must be equivalent integer types, ala int_binop_types_match_p.
1723 If the operands are constant, so is the result. */
1724
1725 tree
1726 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1727 {
1728 tree type = TREE_TYPE (arg0);
1729
1730 if (arg0 == error_mark_node || arg1 == error_mark_node)
1731 return error_mark_node;
1732
1733 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1734 TREE_TYPE (arg1)));
1735
1736 /* Handle the special case of two integer constants faster. */
1737 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1738 {
1739 /* And some specific cases even faster than that. */
1740 if (code == PLUS_EXPR)
1741 {
1742 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1743 return arg1;
1744 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1745 return arg0;
1746 }
1747 else if (code == MINUS_EXPR)
1748 {
1749 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1750 return arg0;
1751 }
1752 else if (code == MULT_EXPR)
1753 {
1754 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1755 return arg1;
1756 }
1757
1758 /* Handle general case of two integer constants. For sizetype
1759 constant calculations we always want to know about overflow,
1760 even in the unsigned case. */
1761 return int_const_binop_1 (code, arg0, arg1, -1);
1762 }
1763
1764 return fold_build2_loc (loc, code, type, arg0, arg1);
1765 }
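
/* Editor's sketch of a hypothetical caller (not part of the original
   source):

     tree sz = size_binop (PLUS_EXPR, size_int (0), size_int (12));

   Both operands are INTEGER_CSTs, so the 0 + x fast path above returns
   the second operand directly; the -1 overflowable argument in the
   general constant case means overflow is always recorded, even for
   unsigned sizetype arithmetic.  */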
1766
1767 /* Given two values, either both of sizetype or both of bitsizetype,
1768 compute the difference between the two values. Return the value
1769    in the signed type corresponding to the type of the operands.  */
1770
1771 tree
1772 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1773 {
1774 tree type = TREE_TYPE (arg0);
1775 tree ctype;
1776
1777 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1778 TREE_TYPE (arg1)));
1779
1780 /* If the type is already signed, just do the simple thing. */
1781 if (!TYPE_UNSIGNED (type))
1782 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1783
1784 if (type == sizetype)
1785 ctype = ssizetype;
1786 else if (type == bitsizetype)
1787 ctype = sbitsizetype;
1788 else
1789 ctype = signed_type_for (type);
1790
1791 /* If either operand is not a constant, do the conversions to the signed
1792 type and subtract. The hardware will do the right thing with any
1793 overflow in the subtraction. */
1794 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1795 return size_binop_loc (loc, MINUS_EXPR,
1796 fold_convert_loc (loc, ctype, arg0),
1797 fold_convert_loc (loc, ctype, arg1));
1798
1799 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1800 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1801 overflow) and negate (which can't either). Special-case a result
1802 of zero while we're here. */
1803 if (tree_int_cst_equal (arg0, arg1))
1804 return build_int_cst (ctype, 0);
1805 else if (tree_int_cst_lt (arg1, arg0))
1806 return fold_convert_loc (loc, ctype,
1807 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1808 else
1809 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1810 fold_convert_loc (loc, ctype,
1811 size_binop_loc (loc,
1812 MINUS_EXPR,
1813 arg1, arg0)));
1814 }
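
/* Editor's example (not part of the original source): for the sizetype
   constants ARG0 == 4 and ARG1 == 8, the final branch above computes
   8 - 4 (which cannot overflow), converts the result to ssizetype and
   subtracts it from zero, yielding the ssizetype constant -4.  */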
1815 \f
1816 /* A subroutine of fold_convert_const handling conversions of an
1817 INTEGER_CST to another integer type. */
1818
1819 static tree
1820 fold_convert_const_int_from_int (tree type, const_tree arg1)
1821 {
1822 /* Given an integer constant, make new constant with new type,
1823 appropriately sign-extended or truncated. Use widest_int
1824      so that any extension is done according to ARG1's type.  */
1825 return force_fit_type (type, wi::to_widest (arg1),
1826 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1827 TREE_OVERFLOW (arg1));
1828 }
1829
1830 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1831 to an integer type. */
1832
1833 static tree
1834 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1835 {
1836 bool overflow = false;
1837 tree t;
1838
1839 /* The following code implements the floating point to integer
1840 conversion rules required by the Java Language Specification,
1841 that IEEE NaNs are mapped to zero and values that overflow
1842 the target precision saturate, i.e. values greater than
1843 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1844 are mapped to INT_MIN. These semantics are allowed by the
1845 C and C++ standards that simply state that the behavior of
1846 FP-to-integer conversion is unspecified upon overflow. */
1847
1848 wide_int val;
1849 REAL_VALUE_TYPE r;
1850 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1851
1852 switch (code)
1853 {
1854 case FIX_TRUNC_EXPR:
1855 real_trunc (&r, VOIDmode, &x);
1856 break;
1857
1858 default:
1859 gcc_unreachable ();
1860 }
1861
1862 /* If R is NaN, return zero and show we have an overflow. */
1863 if (REAL_VALUE_ISNAN (r))
1864 {
1865 overflow = true;
1866 val = wi::zero (TYPE_PRECISION (type));
1867 }
1868
1869 /* See if R is less than the lower bound or greater than the
1870 upper bound. */
1871
1872 if (! overflow)
1873 {
1874 tree lt = TYPE_MIN_VALUE (type);
1875 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1876 if (REAL_VALUES_LESS (r, l))
1877 {
1878 overflow = true;
1879 val = lt;
1880 }
1881 }
1882
1883 if (! overflow)
1884 {
1885 tree ut = TYPE_MAX_VALUE (type);
1886 if (ut)
1887 {
1888 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1889 if (REAL_VALUES_LESS (u, r))
1890 {
1891 overflow = true;
1892 val = ut;
1893 }
1894 }
1895 }
1896
1897 if (! overflow)
1898 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1899
1900 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1901 return t;
1902 }
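
/* Illustrative sketch (not part of GCC): the saturating semantics
   implemented above, written out in plain C for a 32-bit signed
   target.  NaN maps to zero and out-of-range values clamp to the
   type's minimum or maximum.  */

static int
saturating_fix_trunc_example (double r)
{
  if (r != r)                    /* NaN */
    return 0;
  if (r <= -2147483648.0)        /* below INT_MIN: saturate */
    return -2147483647 - 1;
  if (r >= 2147483647.0)         /* above INT_MAX: saturate */
    return 2147483647;
  return (int) r;                /* in range: truncate toward zero */
}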
1903
1904 /* A subroutine of fold_convert_const handling conversions of a
1905 FIXED_CST to an integer type. */
1906
1907 static tree
1908 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1909 {
1910 tree t;
1911 double_int temp, temp_trunc;
1912 unsigned int mode;
1913
1914 /* Right shift FIXED_CST to temp by fbit. */
1915 temp = TREE_FIXED_CST (arg1).data;
1916 mode = TREE_FIXED_CST (arg1).mode;
1917 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1918 {
1919 temp = temp.rshift (GET_MODE_FBIT (mode),
1920 HOST_BITS_PER_DOUBLE_INT,
1921 SIGNED_FIXED_POINT_MODE_P (mode));
1922
1923 /* Left shift temp to temp_trunc by fbit. */
1924 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1925 HOST_BITS_PER_DOUBLE_INT,
1926 SIGNED_FIXED_POINT_MODE_P (mode));
1927 }
1928 else
1929 {
1930 temp = double_int_zero;
1931 temp_trunc = double_int_zero;
1932 }
1933
1934 /* If FIXED_CST is negative, we need to round the value toward 0.
1935 We do this by adding 1 to TEMP when any fractional bits are nonzero. */
1936 if (SIGNED_FIXED_POINT_MODE_P (mode)
1937 && temp_trunc.is_negative ()
1938 && TREE_FIXED_CST (arg1).data != temp_trunc)
1939 temp += double_int_one;
1940
1941 /* Given a fixed-point constant, make new constant with new type,
1942 appropriately sign-extended or truncated. */
1943 t = force_fit_type (type, temp, -1,
1944 (temp.is_negative ()
1945 && (TYPE_UNSIGNED (type)
1946 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1947 | TREE_OVERFLOW (arg1));
1948
1949 return t;
1950 }
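
/* Illustrative sketch (not part of GCC): rounding a signed
   fixed-point value with FBIT fractional bits toward zero, the same
   scheme as the double_int code above.  FBIT is a hypothetical
   parameter, and arithmetic shifts on negative values are assumed to
   behave as on GCC targets.  */

static long long
fixed_trunc_example (long long fixed_val, int fbit)
{
  long long t = fixed_val >> fbit;         /* arithmetic shift: floor */
  if (fixed_val < 0 && (t << fbit) != fixed_val)
    t += 1;                                /* nonzero fraction on a
                                              negative value: round
                                              back toward zero */
  return t;
}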
1951
1952 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1953 to another floating point type. */
1954
1955 static tree
1956 fold_convert_const_real_from_real (tree type, const_tree arg1)
1957 {
1958 REAL_VALUE_TYPE value;
1959 tree t;
1960
1961 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1962 t = build_real (type, value);
1963
1964 /* If converting an infinity or NAN to a representation that doesn't
1965 have one, set the overflow bit so that we can produce some kind of
1966 error message at the appropriate point if necessary. It's not the
1967 most user-friendly message, but it's better than nothing. */
1968 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1969 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1970 TREE_OVERFLOW (t) = 1;
1971 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1972 && !MODE_HAS_NANS (TYPE_MODE (type)))
1973 TREE_OVERFLOW (t) = 1;
1974 /* Regular overflow, conversion produced an infinity in a mode that
1975 can't represent infinities. */
1976 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1977 && REAL_VALUE_ISINF (value)
1978 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1979 TREE_OVERFLOW (t) = 1;
1980 else
1981 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1982 return t;
1983 }
1984
1985 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1986 to a floating point type. */
1987
1988 static tree
1989 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1990 {
1991 REAL_VALUE_TYPE value;
1992 tree t;
1993
1994 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1995 t = build_real (type, value);
1996
1997 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1998 return t;
1999 }
2000
2001 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2002 to another fixed-point type. */
2003
2004 static tree
2005 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2006 {
2007 FIXED_VALUE_TYPE value;
2008 tree t;
2009 bool overflow_p;
2010
2011 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2012 TYPE_SATURATING (type));
2013 t = build_fixed (type, value);
2014
2015 /* Propagate overflow flags. */
2016 if (overflow_p | TREE_OVERFLOW (arg1))
2017 TREE_OVERFLOW (t) = 1;
2018 return t;
2019 }
2020
2021 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2022 to a fixed-point type. */
2023
2024 static tree
2025 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2026 {
2027 FIXED_VALUE_TYPE value;
2028 tree t;
2029 bool overflow_p;
2030 double_int di;
2031
2032 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2033
2034 di.low = TREE_INT_CST_ELT (arg1, 0);
2035 if (TREE_INT_CST_NUNITS (arg1) == 1)
2036 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2037 else
2038 di.high = TREE_INT_CST_ELT (arg1, 1);
2039
2040 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2041 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2042 TYPE_SATURATING (type));
2043 t = build_fixed (type, value);
2044
2045 /* Propagate overflow flags. */
2046 if (overflow_p | TREE_OVERFLOW (arg1))
2047 TREE_OVERFLOW (t) = 1;
2048 return t;
2049 }
2050
2051 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2052 to a fixed-point type. */
2053
2054 static tree
2055 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2056 {
2057 FIXED_VALUE_TYPE value;
2058 tree t;
2059 bool overflow_p;
2060
2061 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2062 &TREE_REAL_CST (arg1),
2063 TYPE_SATURATING (type));
2064 t = build_fixed (type, value);
2065
2066 /* Propagate overflow flags. */
2067 if (overflow_p | TREE_OVERFLOW (arg1))
2068 TREE_OVERFLOW (t) = 1;
2069 return t;
2070 }
2071
2072 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2073 type TYPE. If no simplification can be done return NULL_TREE. */
2074
2075 static tree
2076 fold_convert_const (enum tree_code code, tree type, tree arg1)
2077 {
2078 if (TREE_TYPE (arg1) == type)
2079 return arg1;
2080
2081 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2082 || TREE_CODE (type) == OFFSET_TYPE)
2083 {
2084 if (TREE_CODE (arg1) == INTEGER_CST)
2085 return fold_convert_const_int_from_int (type, arg1);
2086 else if (TREE_CODE (arg1) == REAL_CST)
2087 return fold_convert_const_int_from_real (code, type, arg1);
2088 else if (TREE_CODE (arg1) == FIXED_CST)
2089 return fold_convert_const_int_from_fixed (type, arg1);
2090 }
2091 else if (TREE_CODE (type) == REAL_TYPE)
2092 {
2093 if (TREE_CODE (arg1) == INTEGER_CST)
2094 return build_real_from_int_cst (type, arg1);
2095 else if (TREE_CODE (arg1) == REAL_CST)
2096 return fold_convert_const_real_from_real (type, arg1);
2097 else if (TREE_CODE (arg1) == FIXED_CST)
2098 return fold_convert_const_real_from_fixed (type, arg1);
2099 }
2100 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2101 {
2102 if (TREE_CODE (arg1) == FIXED_CST)
2103 return fold_convert_const_fixed_from_fixed (type, arg1);
2104 else if (TREE_CODE (arg1) == INTEGER_CST)
2105 return fold_convert_const_fixed_from_int (type, arg1);
2106 else if (TREE_CODE (arg1) == REAL_CST)
2107 return fold_convert_const_fixed_from_real (type, arg1);
2108 }
2109 return NULL_TREE;
2110 }
2111
2112 /* Construct a vector of zero elements of vector type TYPE. */
2113
2114 static tree
2115 build_zero_vector (tree type)
2116 {
2117 tree t;
2118
2119 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2120 return build_vector_from_val (type, t);
2121 }
2122
2123 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2124
2125 bool
2126 fold_convertible_p (const_tree type, const_tree arg)
2127 {
2128 tree orig = TREE_TYPE (arg);
2129
2130 if (type == orig)
2131 return true;
2132
2133 if (TREE_CODE (arg) == ERROR_MARK
2134 || TREE_CODE (type) == ERROR_MARK
2135 || TREE_CODE (orig) == ERROR_MARK)
2136 return false;
2137
2138 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2139 return true;
2140
2141 switch (TREE_CODE (type))
2142 {
2143 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2144 case POINTER_TYPE: case REFERENCE_TYPE:
2145 case OFFSET_TYPE:
2146 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2147 || TREE_CODE (orig) == OFFSET_TYPE)
2148 return true;
2149 return (TREE_CODE (orig) == VECTOR_TYPE
2150 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2151
2152 case REAL_TYPE:
2153 case FIXED_POINT_TYPE:
2154 case COMPLEX_TYPE:
2155 case VECTOR_TYPE:
2156 case VOID_TYPE:
2157 return TREE_CODE (type) == TREE_CODE (orig);
2158
2159 default:
2160 return false;
2161 }
2162 }
2163
2164 /* Convert expression ARG to type TYPE. Used by the middle-end for
2165 simple conversions in preference to calling the front-end's convert. */
2166
2167 tree
2168 fold_convert_loc (location_t loc, tree type, tree arg)
2169 {
2170 tree orig = TREE_TYPE (arg);
2171 tree tem;
2172
2173 if (type == orig)
2174 return arg;
2175
2176 if (TREE_CODE (arg) == ERROR_MARK
2177 || TREE_CODE (type) == ERROR_MARK
2178 || TREE_CODE (orig) == ERROR_MARK)
2179 return error_mark_node;
2180
2181 switch (TREE_CODE (type))
2182 {
2183 case POINTER_TYPE:
2184 case REFERENCE_TYPE:
2185 /* Handle conversions between pointers to different address spaces. */
2186 if (POINTER_TYPE_P (orig)
2187 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2188 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2189 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2190 /* fall through */
2191
2192 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2193 case OFFSET_TYPE:
2194 if (TREE_CODE (arg) == INTEGER_CST)
2195 {
2196 tem = fold_convert_const (NOP_EXPR, type, arg);
2197 if (tem != NULL_TREE)
2198 return tem;
2199 }
2200 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2201 || TREE_CODE (orig) == OFFSET_TYPE)
2202 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2203 if (TREE_CODE (orig) == COMPLEX_TYPE)
2204 return fold_convert_loc (loc, type,
2205 fold_build1_loc (loc, REALPART_EXPR,
2206 TREE_TYPE (orig), arg));
2207 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2208 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2209 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2210
2211 case REAL_TYPE:
2212 if (TREE_CODE (arg) == INTEGER_CST)
2213 {
2214 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2215 if (tem != NULL_TREE)
2216 return tem;
2217 }
2218 else if (TREE_CODE (arg) == REAL_CST)
2219 {
2220 tem = fold_convert_const (NOP_EXPR, type, arg);
2221 if (tem != NULL_TREE)
2222 return tem;
2223 }
2224 else if (TREE_CODE (arg) == FIXED_CST)
2225 {
2226 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2227 if (tem != NULL_TREE)
2228 return tem;
2229 }
2230
2231 switch (TREE_CODE (orig))
2232 {
2233 case INTEGER_TYPE:
2234 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2235 case POINTER_TYPE: case REFERENCE_TYPE:
2236 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2237
2238 case REAL_TYPE:
2239 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2240
2241 case FIXED_POINT_TYPE:
2242 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2243
2244 case COMPLEX_TYPE:
2245 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2246 return fold_convert_loc (loc, type, tem);
2247
2248 default:
2249 gcc_unreachable ();
2250 }
2251
2252 case FIXED_POINT_TYPE:
2253 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2254 || TREE_CODE (arg) == REAL_CST)
2255 {
2256 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2257 if (tem != NULL_TREE)
2258 goto fold_convert_exit;
2259 }
2260
2261 switch (TREE_CODE (orig))
2262 {
2263 case FIXED_POINT_TYPE:
2264 case INTEGER_TYPE:
2265 case ENUMERAL_TYPE:
2266 case BOOLEAN_TYPE:
2267 case REAL_TYPE:
2268 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2269
2270 case COMPLEX_TYPE:
2271 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2272 return fold_convert_loc (loc, type, tem);
2273
2274 default:
2275 gcc_unreachable ();
2276 }
2277
2278 case COMPLEX_TYPE:
2279 switch (TREE_CODE (orig))
2280 {
2281 case INTEGER_TYPE:
2282 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2283 case POINTER_TYPE: case REFERENCE_TYPE:
2284 case REAL_TYPE:
2285 case FIXED_POINT_TYPE:
2286 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2287 fold_convert_loc (loc, TREE_TYPE (type), arg),
2288 fold_convert_loc (loc, TREE_TYPE (type),
2289 integer_zero_node));
2290 case COMPLEX_TYPE:
2291 {
2292 tree rpart, ipart;
2293
2294 if (TREE_CODE (arg) == COMPLEX_EXPR)
2295 {
2296 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2297 TREE_OPERAND (arg, 0));
2298 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2299 TREE_OPERAND (arg, 1));
2300 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2301 }
2302
2303 arg = save_expr (arg);
2304 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2305 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2306 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2307 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2308 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2309 }
2310
2311 default:
2312 gcc_unreachable ();
2313 }
2314
2315 case VECTOR_TYPE:
2316 if (integer_zerop (arg))
2317 return build_zero_vector (type);
2318 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2319 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2320 || TREE_CODE (orig) == VECTOR_TYPE);
2321 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2322
2323 case VOID_TYPE:
2324 tem = fold_ignored_result (arg);
2325 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2326
2327 default:
2328 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2329 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2330 gcc_unreachable ();
2331 }
2332 fold_convert_exit:
2333 protected_set_expr_location_unshare (tem, loc);
2334 return tem;
2335 }
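
/* Illustrative example (not part of GCC): converting a complex value
   to a scalar type keeps only the real part, which is what the
   REALPART_EXPR cases above build; this matches the C99 rule for
   complex-to-real conversion.  */

static double
complex_to_real_example (__complex__ double z)
{
  return __real__ z;   /* (double) z folds to REALPART_EXPR <z> */
}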
2336 \f
2337 /* Return false if expr can be assumed not to be an lvalue, true
2338 otherwise. */
2339
2340 static bool
2341 maybe_lvalue_p (const_tree x)
2342 {
2343 /* We only need to wrap lvalue tree codes. */
2344 switch (TREE_CODE (x))
2345 {
2346 case VAR_DECL:
2347 case PARM_DECL:
2348 case RESULT_DECL:
2349 case LABEL_DECL:
2350 case FUNCTION_DECL:
2351 case SSA_NAME:
2352
2353 case COMPONENT_REF:
2354 case MEM_REF:
2355 case INDIRECT_REF:
2356 case ARRAY_REF:
2357 case ARRAY_RANGE_REF:
2358 case BIT_FIELD_REF:
2359 case OBJ_TYPE_REF:
2360
2361 case REALPART_EXPR:
2362 case IMAGPART_EXPR:
2363 case PREINCREMENT_EXPR:
2364 case PREDECREMENT_EXPR:
2365 case SAVE_EXPR:
2366 case TRY_CATCH_EXPR:
2367 case WITH_CLEANUP_EXPR:
2368 case COMPOUND_EXPR:
2369 case MODIFY_EXPR:
2370 case TARGET_EXPR:
2371 case COND_EXPR:
2372 case BIND_EXPR:
2373 break;
2374
2375 default:
2376 /* Assume the worst for front-end tree codes. */
2377 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2378 break;
2379 return false;
2380 }
2381
2382 return true;
2383 }
2384
2385 /* Return an expr equal to X but certainly not valid as an lvalue. */
2386
2387 tree
2388 non_lvalue_loc (location_t loc, tree x)
2389 {
2390 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2391 us. */
2392 if (in_gimple_form)
2393 return x;
2394
2395 if (! maybe_lvalue_p (x))
2396 return x;
2397 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2398 }
2399
2400 /* When pedantic, return an expr equal to X but certainly not valid as a
2401 pedantic lvalue. Otherwise, return X. */
2402
2403 static tree
2404 pedantic_non_lvalue_loc (location_t loc, tree x)
2405 {
2406 return protected_set_expr_location_unshare (x, loc);
2407 }
2408 \f
2409 /* Given a tree comparison code, return the code that is the logical inverse.
2410 It is generally not safe to do this for floating-point comparisons, except
2411 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2412 ERROR_MARK in this case. */
2413
2414 enum tree_code
2415 invert_tree_comparison (enum tree_code code, bool honor_nans)
2416 {
2417 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2418 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2419 return ERROR_MARK;
2420
2421 switch (code)
2422 {
2423 case EQ_EXPR:
2424 return NE_EXPR;
2425 case NE_EXPR:
2426 return EQ_EXPR;
2427 case GT_EXPR:
2428 return honor_nans ? UNLE_EXPR : LE_EXPR;
2429 case GE_EXPR:
2430 return honor_nans ? UNLT_EXPR : LT_EXPR;
2431 case LT_EXPR:
2432 return honor_nans ? UNGE_EXPR : GE_EXPR;
2433 case LE_EXPR:
2434 return honor_nans ? UNGT_EXPR : GT_EXPR;
2435 case LTGT_EXPR:
2436 return UNEQ_EXPR;
2437 case UNEQ_EXPR:
2438 return LTGT_EXPR;
2439 case UNGT_EXPR:
2440 return LE_EXPR;
2441 case UNGE_EXPR:
2442 return LT_EXPR;
2443 case UNLT_EXPR:
2444 return GE_EXPR;
2445 case UNLE_EXPR:
2446 return GT_EXPR;
2447 case ORDERED_EXPR:
2448 return UNORDERED_EXPR;
2449 case UNORDERED_EXPR:
2450 return ORDERED_EXPR;
2451 default:
2452 gcc_unreachable ();
2453 }
2454 }
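
/* Illustrative example (not part of GCC): with NaNs honored, the
   logical inverse of x < y is not x >= y but UNGE (x, y), because
   !(x < y) is also true when either operand is a NaN; and under
   -ftrapping-math the inversion must be refused, since LT_EXPR traps
   on unordered operands while UNGE_EXPR does not.  */

static int
inverted_lt_example (double x, double y)
{
  /* True for NaN operands, matching UNGE_EXPR rather than GE_EXPR.  */
  return !(x < y);
}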
2455
2456 /* Similar, but return the comparison that results if the operands are
2457 swapped. This is safe for floating-point. */
2458
2459 enum tree_code
2460 swap_tree_comparison (enum tree_code code)
2461 {
2462 switch (code)
2463 {
2464 case EQ_EXPR:
2465 case NE_EXPR:
2466 case ORDERED_EXPR:
2467 case UNORDERED_EXPR:
2468 case LTGT_EXPR:
2469 case UNEQ_EXPR:
2470 return code;
2471 case GT_EXPR:
2472 return LT_EXPR;
2473 case GE_EXPR:
2474 return LE_EXPR;
2475 case LT_EXPR:
2476 return GT_EXPR;
2477 case LE_EXPR:
2478 return GE_EXPR;
2479 case UNGT_EXPR:
2480 return UNLT_EXPR;
2481 case UNGE_EXPR:
2482 return UNLE_EXPR;
2483 case UNLT_EXPR:
2484 return UNGT_EXPR;
2485 case UNLE_EXPR:
2486 return UNGE_EXPR;
2487 default:
2488 gcc_unreachable ();
2489 }
2490 }
2491
2492
2493 /* Convert a comparison tree code from an enum tree_code representation
2494 into a compcode bit-based encoding. This function is the inverse of
2495 compcode_to_comparison. */
2496
2497 static enum comparison_code
2498 comparison_to_compcode (enum tree_code code)
2499 {
2500 switch (code)
2501 {
2502 case LT_EXPR:
2503 return COMPCODE_LT;
2504 case EQ_EXPR:
2505 return COMPCODE_EQ;
2506 case LE_EXPR:
2507 return COMPCODE_LE;
2508 case GT_EXPR:
2509 return COMPCODE_GT;
2510 case NE_EXPR:
2511 return COMPCODE_NE;
2512 case GE_EXPR:
2513 return COMPCODE_GE;
2514 case ORDERED_EXPR:
2515 return COMPCODE_ORD;
2516 case UNORDERED_EXPR:
2517 return COMPCODE_UNORD;
2518 case UNLT_EXPR:
2519 return COMPCODE_UNLT;
2520 case UNEQ_EXPR:
2521 return COMPCODE_UNEQ;
2522 case UNLE_EXPR:
2523 return COMPCODE_UNLE;
2524 case UNGT_EXPR:
2525 return COMPCODE_UNGT;
2526 case LTGT_EXPR:
2527 return COMPCODE_LTGT;
2528 case UNGE_EXPR:
2529 return COMPCODE_UNGE;
2530 default:
2531 gcc_unreachable ();
2532 }
2533 }
2534
2535 /* Convert a compcode bit-based encoding of a comparison operator back
2536 to GCC's enum tree_code representation. This function is the
2537 inverse of comparison_to_compcode. */
2538
2539 static enum tree_code
2540 compcode_to_comparison (enum comparison_code code)
2541 {
2542 switch (code)
2543 {
2544 case COMPCODE_LT:
2545 return LT_EXPR;
2546 case COMPCODE_EQ:
2547 return EQ_EXPR;
2548 case COMPCODE_LE:
2549 return LE_EXPR;
2550 case COMPCODE_GT:
2551 return GT_EXPR;
2552 case COMPCODE_NE:
2553 return NE_EXPR;
2554 case COMPCODE_GE:
2555 return GE_EXPR;
2556 case COMPCODE_ORD:
2557 return ORDERED_EXPR;
2558 case COMPCODE_UNORD:
2559 return UNORDERED_EXPR;
2560 case COMPCODE_UNLT:
2561 return UNLT_EXPR;
2562 case COMPCODE_UNEQ:
2563 return UNEQ_EXPR;
2564 case COMPCODE_UNLE:
2565 return UNLE_EXPR;
2566 case COMPCODE_UNGT:
2567 return UNGT_EXPR;
2568 case COMPCODE_LTGT:
2569 return LTGT_EXPR;
2570 case COMPCODE_UNGE:
2571 return UNGE_EXPR;
2572 default:
2573 gcc_unreachable ();
2574 }
2575 }
2576
2577 /* Return a tree for the comparison which is the combination of
2578 doing the AND or OR (depending on CODE) of the two operations LCODE
2579 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2580 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2581 if this makes the transformation invalid. */
2582
2583 tree
2584 combine_comparisons (location_t loc,
2585 enum tree_code code, enum tree_code lcode,
2586 enum tree_code rcode, tree truth_type,
2587 tree ll_arg, tree lr_arg)
2588 {
2589 bool honor_nans = HONOR_NANS (ll_arg);
2590 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2591 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2592 int compcode;
2593
2594 switch (code)
2595 {
2596 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2597 compcode = lcompcode & rcompcode;
2598 break;
2599
2600 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2601 compcode = lcompcode | rcompcode;
2602 break;
2603
2604 default:
2605 return NULL_TREE;
2606 }
2607
2608 if (!honor_nans)
2609 {
2610 /* Eliminate unordered comparisons, as well as LTGT and ORD
2611 which are not used unless the mode has NaNs. */
2612 compcode &= ~COMPCODE_UNORD;
2613 if (compcode == COMPCODE_LTGT)
2614 compcode = COMPCODE_NE;
2615 else if (compcode == COMPCODE_ORD)
2616 compcode = COMPCODE_TRUE;
2617 }
2618 else if (flag_trapping_math)
2619 {
2620 /* Check that the original operation and the optimized ones will trap
2621 under the same condition. */
2622 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2623 && (lcompcode != COMPCODE_EQ)
2624 && (lcompcode != COMPCODE_ORD);
2625 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2626 && (rcompcode != COMPCODE_EQ)
2627 && (rcompcode != COMPCODE_ORD);
2628 bool trap = (compcode & COMPCODE_UNORD) == 0
2629 && (compcode != COMPCODE_EQ)
2630 && (compcode != COMPCODE_ORD);
2631
2632 /* In a short-circuited boolean expression the LHS might be
2633 such that the RHS, if evaluated, will never trap. For
2634 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2635 if neither x nor y is NaN. (This is a mixed blessing: for
2636 example, the expression above will never trap, hence
2637 optimizing it to x < y would be invalid). */
2638 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2639 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2640 rtrap = false;
2641
2642 /* If the comparison was short-circuited, and only the RHS
2643 trapped, we may now generate a spurious trap. */
2644 if (rtrap && !ltrap
2645 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2646 return NULL_TREE;
2647
2648 /* If we changed the conditions that cause a trap, we lose. */
2649 if ((ltrap || rtrap) != trap)
2650 return NULL_TREE;
2651 }
2652
2653 if (compcode == COMPCODE_TRUE)
2654 return constant_boolean_node (true, truth_type);
2655 else if (compcode == COMPCODE_FALSE)
2656 return constant_boolean_node (false, truth_type);
2657 else
2658 {
2659 enum tree_code tcode;
2660
2661 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2662 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2663 }
2664 }
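
/* Illustrative example (not part of GCC): the bit encoding makes the
   combination itself a single bitwise operation.  For instance,
   (x <= y) && (x >= y) gives COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2
   == COMPCODE_EQ, so the pair folds to x == y, while
   (x < y) || (x == y) gives 1 | 2 == 3 == COMPCODE_LE, i.e. x <= y.  */

static int
combine_compcode_example (int lcompcode, int rcompcode, int is_and)
{
  return is_and ? (lcompcode & rcompcode) : (lcompcode | rcompcode);
}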
2665 \f
2666 /* Return nonzero if two operands (typically of the same tree node)
2667 are necessarily equal. If either argument has side-effects this
2668 function returns zero. FLAGS modifies behavior as follows:
2669
2670 If OEP_ONLY_CONST is set, only return nonzero for constants.
2671 This function tests whether the operands are indistinguishable;
2672 it does not test whether they are equal using C's == operation.
2673 The distinction is important for IEEE floating point, because
2674 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2675 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2676
2677 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2678 even though it may hold multiple values during a function.
2679 This is because a GCC tree node guarantees that nothing else is
2680 executed between the evaluation of its "operands" (which may often
2681 be evaluated in arbitrary order). Hence if the operands themselves
2682 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2683 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2684 unset means assuming isochronic (or instantaneous) tree equivalence.
2685 Unless comparing arbitrary expression trees, such as from different
2686 statements, this flag can usually be left unset.
2687
2688 If OEP_PURE_SAME is set, then pure functions with identical arguments
2689 are considered the same. It is used when the caller has other ways
2690 to ensure that global memory is unchanged in between. */
2691
2692 int
2693 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2694 {
2695 /* If either is ERROR_MARK, they aren't equal. */
2696 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2697 || TREE_TYPE (arg0) == error_mark_node
2698 || TREE_TYPE (arg1) == error_mark_node)
2699 return 0;
2700
2701 /* Similar, if either does not have a type (like a released SSA name),
2702 they aren't equal. */
2703 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2704 return 0;
2705
2706 /* Check equality of integer constants before bailing out due to
2707 precision differences. */
2708 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2709 return tree_int_cst_equal (arg0, arg1);
2710
2711 /* If the two types don't have the same signedness, then we can't consider
2712 them equal. We must check this before the STRIP_NOPS calls
2713 because they may change the signedness of the arguments. As pointers
2714 strictly don't have a signedness, require either two pointers or
2715 two non-pointers as well. */
2716 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2717 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2718 return 0;
2719
2720 /* We cannot consider pointers to different address spaces equal. */
2721 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2722 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2723 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2724 return 0;
2725
2726 /* If the two types don't have the same precision, then it is not safe
2727 to strip NOPs. */
2728 if (element_precision (TREE_TYPE (arg0))
2729 != element_precision (TREE_TYPE (arg1)))
2730 return 0;
2731
2732 STRIP_NOPS (arg0);
2733 STRIP_NOPS (arg1);
2734
2735 /* In case both args are comparisons but with different comparison
2736 code, try to swap the comparison operands of one arg to produce
2737 a match and compare that variant. */
2738 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2739 && COMPARISON_CLASS_P (arg0)
2740 && COMPARISON_CLASS_P (arg1))
2741 {
2742 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2743
2744 if (TREE_CODE (arg0) == swap_code)
2745 return operand_equal_p (TREE_OPERAND (arg0, 0),
2746 TREE_OPERAND (arg1, 1), flags)
2747 && operand_equal_p (TREE_OPERAND (arg0, 1),
2748 TREE_OPERAND (arg1, 0), flags);
2749 }
2750
2751 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2752 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2753 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2754 return 0;
2755
2756 /* This is needed for conversions and for COMPONENT_REF.
2757 Might as well play it safe and always test this. */
2758 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2759 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2760 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2761 return 0;
2762
2763 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2764 We don't care about side effects in that case because the SAVE_EXPR
2765 takes care of that for us. In all other cases, two expressions are
2766 equal if they have no side effects. If we have two identical
2767 expressions with side effects that should be treated the same due
2768 to the only side effects being identical SAVE_EXPR's, that will
2769 be detected in the recursive calls below.
2770 If we are taking an invariant address of two identical objects
2771 they are necessarily equal as well. */
2772 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2773 && (TREE_CODE (arg0) == SAVE_EXPR
2774 || (flags & OEP_CONSTANT_ADDRESS_OF)
2775 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2776 return 1;
2777
2778 /* Next handle constant cases, those for which we can return 1 even
2779 if ONLY_CONST is set. */
2780 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2781 switch (TREE_CODE (arg0))
2782 {
2783 case INTEGER_CST:
2784 return tree_int_cst_equal (arg0, arg1);
2785
2786 case FIXED_CST:
2787 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2788 TREE_FIXED_CST (arg1));
2789
2790 case REAL_CST:
2791 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2792 TREE_REAL_CST (arg1)))
2793 return 1;
2794
2795
2796 if (!HONOR_SIGNED_ZEROS (arg0))
2797 {
2798 /* If we do not distinguish between signed and unsigned zero,
2799 consider them equal. */
2800 if (real_zerop (arg0) && real_zerop (arg1))
2801 return 1;
2802 }
2803 return 0;
2804
2805 case VECTOR_CST:
2806 {
2807 unsigned i;
2808
2809 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2810 return 0;
2811
2812 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2813 {
2814 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2815 VECTOR_CST_ELT (arg1, i), flags))
2816 return 0;
2817 }
2818 return 1;
2819 }
2820
2821 case COMPLEX_CST:
2822 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2823 flags)
2824 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2825 flags));
2826
2827 case STRING_CST:
2828 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2829 && ! memcmp (TREE_STRING_POINTER (arg0),
2830 TREE_STRING_POINTER (arg1),
2831 TREE_STRING_LENGTH (arg0)));
2832
2833 case ADDR_EXPR:
2834 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2835 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2836 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2837 default:
2838 break;
2839 }
2840
2841 if (flags & OEP_ONLY_CONST)
2842 return 0;
2843
2844 /* Define macros to test an operand from arg0 and arg1 for equality and a
2845 variant that allows null and views null as being different from any
2846 non-null value. In the latter case, if either is null, then both
2847 must be; otherwise, do the normal comparison. */
2848 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2849 TREE_OPERAND (arg1, N), flags)
2850
2851 #define OP_SAME_WITH_NULL(N) \
2852 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2853 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2854
2855 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2856 {
2857 case tcc_unary:
2858 /* Two conversions are equal only if signedness and modes match. */
2859 switch (TREE_CODE (arg0))
2860 {
2861 CASE_CONVERT:
2862 case FIX_TRUNC_EXPR:
2863 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2864 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2865 return 0;
2866 break;
2867 default:
2868 break;
2869 }
2870
2871 return OP_SAME (0);
2872
2873
2874 case tcc_comparison:
2875 case tcc_binary:
2876 if (OP_SAME (0) && OP_SAME (1))
2877 return 1;
2878
2879 /* For commutative ops, allow the other order. */
2880 return (commutative_tree_code (TREE_CODE (arg0))
2881 && operand_equal_p (TREE_OPERAND (arg0, 0),
2882 TREE_OPERAND (arg1, 1), flags)
2883 && operand_equal_p (TREE_OPERAND (arg0, 1),
2884 TREE_OPERAND (arg1, 0), flags));
2885
2886 case tcc_reference:
2887 /* If either of the pointer (or reference) expressions we are
2888 dereferencing contains a side effect, these cannot be equal,
2889 but their addresses can be. */
2890 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2891 && (TREE_SIDE_EFFECTS (arg0)
2892 || TREE_SIDE_EFFECTS (arg1)))
2893 return 0;
2894
2895 switch (TREE_CODE (arg0))
2896 {
2897 case INDIRECT_REF:
2898 if (!(flags & OEP_ADDRESS_OF)
2899 && (TYPE_ALIGN (TREE_TYPE (arg0))
2900 != TYPE_ALIGN (TREE_TYPE (arg1))))
2901 return 0;
2902 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2903 return OP_SAME (0);
2904
2905 case REALPART_EXPR:
2906 case IMAGPART_EXPR:
2907 return OP_SAME (0);
2908
2909 case TARGET_MEM_REF:
2910 case MEM_REF:
2911 /* Require equal access sizes, and similar pointer types.
2912 We can have incomplete types for array references of
2913 variable-sized arrays from the Fortran frontend
2914 though. Also verify the types are compatible. */
2915 if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2916 || (TYPE_SIZE (TREE_TYPE (arg0))
2917 && TYPE_SIZE (TREE_TYPE (arg1))
2918 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2919 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2920 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2921 && ((flags & OEP_ADDRESS_OF)
2922 || (alias_ptr_types_compatible_p
2923 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2924 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2925 && (MR_DEPENDENCE_CLIQUE (arg0)
2926 == MR_DEPENDENCE_CLIQUE (arg1))
2927 && (MR_DEPENDENCE_BASE (arg0)
2928 == MR_DEPENDENCE_BASE (arg1))
2929 && (TYPE_ALIGN (TREE_TYPE (arg0))
2930 == TYPE_ALIGN (TREE_TYPE (arg1)))))))
2931 return 0;
2932 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2933 return (OP_SAME (0) && OP_SAME (1)
2934 /* TARGET_MEM_REFs require equal extra operands. */
2935 && (TREE_CODE (arg0) != TARGET_MEM_REF
2936 || (OP_SAME_WITH_NULL (2)
2937 && OP_SAME_WITH_NULL (3)
2938 && OP_SAME_WITH_NULL (4))));
2939
2940 case ARRAY_REF:
2941 case ARRAY_RANGE_REF:
2942 /* Operands 2 and 3 may be null.
2943 Compare the array index by value first if it is constant, as we
2944 may have different types but the same value here. */
2945 if (!OP_SAME (0))
2946 return 0;
2947 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2948 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2949 TREE_OPERAND (arg1, 1))
2950 || OP_SAME (1))
2951 && OP_SAME_WITH_NULL (2)
2952 && OP_SAME_WITH_NULL (3));
2953
2954 case COMPONENT_REF:
2955 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2956 may be NULL when we're called to compare MEM_EXPRs. */
2957 if (!OP_SAME_WITH_NULL (0)
2958 || !OP_SAME (1))
2959 return 0;
2960 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2961 return OP_SAME_WITH_NULL (2);
2962
2963 case BIT_FIELD_REF:
2964 if (!OP_SAME (0))
2965 return 0;
2966 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2967 return OP_SAME (1) && OP_SAME (2);
2968
2969 default:
2970 return 0;
2971 }
2972
2973 case tcc_expression:
2974 switch (TREE_CODE (arg0))
2975 {
2976 case ADDR_EXPR:
2977 return operand_equal_p (TREE_OPERAND (arg0, 0),
2978 TREE_OPERAND (arg1, 0),
2979 flags | OEP_ADDRESS_OF);
2980
2981 case TRUTH_NOT_EXPR:
2982 return OP_SAME (0);
2983
2984 case TRUTH_ANDIF_EXPR:
2985 case TRUTH_ORIF_EXPR:
2986 return OP_SAME (0) && OP_SAME (1);
2987
2988 case FMA_EXPR:
2989 case WIDEN_MULT_PLUS_EXPR:
2990 case WIDEN_MULT_MINUS_EXPR:
2991 if (!OP_SAME (2))
2992 return 0;
2993 /* The multiplication operands are commutative. */
2994 /* FALLTHRU */
2995
2996 case TRUTH_AND_EXPR:
2997 case TRUTH_OR_EXPR:
2998 case TRUTH_XOR_EXPR:
2999 if (OP_SAME (0) && OP_SAME (1))
3000 return 1;
3001
3002 /* Otherwise take into account this is a commutative operation. */
3003 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3004 TREE_OPERAND (arg1, 1), flags)
3005 && operand_equal_p (TREE_OPERAND (arg0, 1),
3006 TREE_OPERAND (arg1, 0), flags));
3007
3008 case COND_EXPR:
3009 case VEC_COND_EXPR:
3010 case DOT_PROD_EXPR:
3011 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3012
3013 default:
3014 return 0;
3015 }
3016
3017 case tcc_vl_exp:
3018 switch (TREE_CODE (arg0))
3019 {
3020 case CALL_EXPR:
3021 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3022 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3023 /* If one CALL_EXPR is an internal function call and the other is
3024 a normal call, then they are not equal. */
3025 return 0;
3026 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3027 {
3028 /* If the CALL_EXPRs call different internal functions, then they
3029 are not equal. */
3030 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3031 return 0;
3032 }
3033 else
3034 {
3035 /* If the CALL_EXPRs call different functions, then they are not
3036 equal. */
3037 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3038 flags))
3039 return 0;
3040 }
3041
3042 {
3043 unsigned int cef = call_expr_flags (arg0);
3044 if (flags & OEP_PURE_SAME)
3045 cef &= ECF_CONST | ECF_PURE;
3046 else
3047 cef &= ECF_CONST;
3048 if (!cef)
3049 return 0;
3050 }
3051
3052 /* Now see if all the arguments are the same. */
3053 {
3054 const_call_expr_arg_iterator iter0, iter1;
3055 const_tree a0, a1;
3056 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3057 a1 = first_const_call_expr_arg (arg1, &iter1);
3058 a0 && a1;
3059 a0 = next_const_call_expr_arg (&iter0),
3060 a1 = next_const_call_expr_arg (&iter1))
3061 if (! operand_equal_p (a0, a1, flags))
3062 return 0;
3063
3064 /* If we get here and both argument lists are exhausted
3065 then the CALL_EXPRs are equal. */
3066 return ! (a0 || a1);
3067 }
3068 default:
3069 return 0;
3070 }
3071
3072 case tcc_declaration:
3073 /* Consider __builtin_sqrt equal to sqrt. */
3074 return (TREE_CODE (arg0) == FUNCTION_DECL
3075 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3076 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3077 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3078
3079 default:
3080 return 0;
3081 }
3082
3083 #undef OP_SAME
3084 #undef OP_SAME_WITH_NULL
3085 }
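
/* Illustrative example (not part of GCC): why indistinguishability is
   stricter than C equality for IEEE floats.  -0.0 == 0.0 holds, yet
   the two values are observably different, so the REAL_CST case above
   treats zeros of opposite sign as equal only when signed zeros are
   not honored.  */

static int
signed_zero_example (void)
{
  double pz = 0.0, nz = -0.0;
  return pz == nz && 1.0 / pz != 1.0 / nz;   /* 1 under IEEE 754 */
}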
3086 \f
3087 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3088 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3089
3090 When in doubt, return 0. */
3091
3092 static int
3093 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3094 {
3095 int unsignedp1, unsignedpo;
3096 tree primarg0, primarg1, primother;
3097 unsigned int correct_width;
3098
3099 if (operand_equal_p (arg0, arg1, 0))
3100 return 1;
3101
3102 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3103 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3104 return 0;
3105
3106 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3107 and see if the inner values are the same. This removes any
3108 signedness comparison, which doesn't matter here. */
3109 primarg0 = arg0, primarg1 = arg1;
3110 STRIP_NOPS (primarg0);
3111 STRIP_NOPS (primarg1);
3112 if (operand_equal_p (primarg0, primarg1, 0))
3113 return 1;
3114
3115 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3116 actual comparison operand, ARG0.
3117
3118 First throw away any conversions to wider types
3119 already present in the operands. */
3120
3121 primarg1 = get_narrower (arg1, &unsignedp1);
3122 primother = get_narrower (other, &unsignedpo);
3123
3124 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3125 if (unsignedp1 == unsignedpo
3126 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3127 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3128 {
3129 tree type = TREE_TYPE (arg0);
3130
3131 /* Make sure the shorter operand is extended the right way
3132 to match the longer operand. */
3133 primarg1 = fold_convert (signed_or_unsigned_type_for
3134 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3135
3136 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3137 return 1;
3138 }
3139
3140 return 0;
3141 }
3142 \f
3143 /* See if ARG is an expression that is either a comparison or is performing
3144 arithmetic on comparisons. The comparisons must only be comparing
3145 two different values, which will be stored in *CVAL1 and *CVAL2; if
3146 they are nonzero it means that some operands have already been found.
3147 No variables may be used anywhere else in the expression except in the
3148 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3149 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3150
3151 If this is true, return 1. Otherwise, return zero. */
3152
3153 static int
3154 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3155 {
3156 enum tree_code code = TREE_CODE (arg);
3157 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3158
3159 /* We can handle some of the tcc_expression cases here. */
3160 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3161 tclass = tcc_unary;
3162 else if (tclass == tcc_expression
3163 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3164 || code == COMPOUND_EXPR))
3165 tclass = tcc_binary;
3166
3167 else if (tclass == tcc_expression && code == SAVE_EXPR
3168 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3169 {
3170 /* If we've already found a CVAL1 or CVAL2, this expression is
3171 too complex to handle. */
3172 if (*cval1 || *cval2)
3173 return 0;
3174
3175 tclass = tcc_unary;
3176 *save_p = 1;
3177 }
3178
3179 switch (tclass)
3180 {
3181 case tcc_unary:
3182 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3183
3184 case tcc_binary:
3185 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3186 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3187 cval1, cval2, save_p));
3188
3189 case tcc_constant:
3190 return 1;
3191
3192 case tcc_expression:
3193 if (code == COND_EXPR)
3194 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3195 cval1, cval2, save_p)
3196 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3197 cval1, cval2, save_p)
3198 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3199 cval1, cval2, save_p));
3200 return 0;
3201
3202 case tcc_comparison:
3203 /* First see if we can handle the first operand, then the second. For
3204 the second operand, we know *CVAL1 can't be zero. It must be that
3205 one side of the comparison is each of the values; test for the
3206 case where this isn't true by failing if the two operands
3207 are the same. */
3208
3209 if (operand_equal_p (TREE_OPERAND (arg, 0),
3210 TREE_OPERAND (arg, 1), 0))
3211 return 0;
3212
3213 if (*cval1 == 0)
3214 *cval1 = TREE_OPERAND (arg, 0);
3215 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3216 ;
3217 else if (*cval2 == 0)
3218 *cval2 = TREE_OPERAND (arg, 0);
3219 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3220 ;
3221 else
3222 return 0;
3223
3224 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3225 ;
3226 else if (*cval2 == 0)
3227 *cval2 = TREE_OPERAND (arg, 1);
3228 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3229 ;
3230 else
3231 return 0;
3232
3233 return 1;
3234
3235 default:
3236 return 0;
3237 }
3238 }
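
/* Illustrative example (not part of GCC): (a < b) | (a == b) is a
   two-value comparison tree, setting *CVAL1 to a and *CVAL2 to b,
   whereas (a < b) | (c == d) mentions four values and is rejected,
   as is (a < b) + a, where a appears outside a comparison.  */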
3239 \f
3240 /* ARG is a tree that is known to contain just arithmetic operations and
3241 comparisons. Evaluate the operations in the tree substituting NEW0 for
3242 any occurrence of OLD0 as an operand of a comparison and likewise for
3243 NEW1 and OLD1. */
3244
3245 static tree
3246 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3247 tree old1, tree new1)
3248 {
3249 tree type = TREE_TYPE (arg);
3250 enum tree_code code = TREE_CODE (arg);
3251 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3252
3253 /* We can handle some of the tcc_expression cases here. */
3254 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3255 tclass = tcc_unary;
3256 else if (tclass == tcc_expression
3257 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3258 tclass = tcc_binary;
3259
3260 switch (tclass)
3261 {
3262 case tcc_unary:
3263 return fold_build1_loc (loc, code, type,
3264 eval_subst (loc, TREE_OPERAND (arg, 0),
3265 old0, new0, old1, new1));
3266
3267 case tcc_binary:
3268 return fold_build2_loc (loc, code, type,
3269 eval_subst (loc, TREE_OPERAND (arg, 0),
3270 old0, new0, old1, new1),
3271 eval_subst (loc, TREE_OPERAND (arg, 1),
3272 old0, new0, old1, new1));
3273
3274 case tcc_expression:
3275 switch (code)
3276 {
3277 case SAVE_EXPR:
3278 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3279 old1, new1);
3280
3281 case COMPOUND_EXPR:
3282 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3283 old1, new1);
3284
3285 case COND_EXPR:
3286 return fold_build3_loc (loc, code, type,
3287 eval_subst (loc, TREE_OPERAND (arg, 0),
3288 old0, new0, old1, new1),
3289 eval_subst (loc, TREE_OPERAND (arg, 1),
3290 old0, new0, old1, new1),
3291 eval_subst (loc, TREE_OPERAND (arg, 2),
3292 old0, new0, old1, new1));
3293 default:
3294 break;
3295 }
3296 /* Fall through - ??? */
3297
3298 case tcc_comparison:
3299 {
3300 tree arg0 = TREE_OPERAND (arg, 0);
3301 tree arg1 = TREE_OPERAND (arg, 1);
3302
3303 /* We need to check both for exact equality and tree equality. The
3304 former will be true if the operand has a side-effect. In that
3305 case, we know the operand occurred exactly once. */
3306
3307 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3308 arg0 = new0;
3309 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3310 arg0 = new1;
3311
3312 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3313 arg1 = new0;
3314 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3315 arg1 = new1;
3316
3317 return fold_build2_loc (loc, code, type, arg0, arg1);
3318 }
3319
3320 default:
3321 return arg;
3322 }
3323 }
3324 \f
3325 /* Return a tree for the case when the result of an expression is RESULT
3326 converted to TYPE and OMITTED was previously an operand of the expression
3327 but is now not needed (e.g., we folded OMITTED * 0).
3328
3329 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3330 the conversion of RESULT to TYPE. */
3331
3332 tree
3333 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3334 {
3335 tree t = fold_convert_loc (loc, type, result);
3336
3337 /* If the resulting operand is an empty statement, just return the omitted
3338 statement cast to void. */
3339 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3340 return build1_loc (loc, NOP_EXPR, void_type_node,
3341 fold_ignored_result (omitted));
3342
3343 if (TREE_SIDE_EFFECTS (omitted))
3344 return build2_loc (loc, COMPOUND_EXPR, type,
3345 fold_ignored_result (omitted), t);
3346
3347 return non_lvalue_loc (loc, t);
3348 }
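
/* Illustrative example (not part of GCC): folding example_fn () * 0
   cannot simply yield 0 when the call has side effects; the result
   must be the equivalent of (example_fn (), 0), which is the
   COMPOUND_EXPR built above.  example_fn is a hypothetical external
   function.  */

extern int example_fn (void);

static int
omit_operand_example (void)
{
  return (example_fn (), 0);   /* folded form of example_fn () * 0 */
}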
3349
3350 /* Return a tree for the case when the result of an expression is RESULT
3351 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3352 of the expression but are now not needed.
3353
3354 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3355 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3356 evaluated before OMITTED2. Otherwise, if neither has side effects,
3357 just do the conversion of RESULT to TYPE. */
3358
3359 tree
3360 omit_two_operands_loc (location_t loc, tree type, tree result,
3361 tree omitted1, tree omitted2)
3362 {
3363 tree t = fold_convert_loc (loc, type, result);
3364
3365 if (TREE_SIDE_EFFECTS (omitted2))
3366 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3367 if (TREE_SIDE_EFFECTS (omitted1))
3368 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3369
3370 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3371 }
3372
3373 \f
3374 /* Return a simplified tree node for the truth-negation of ARG. This
3375 never alters ARG itself. We assume that ARG is an operation that
3376 returns a truth value (0 or 1).
3377
3378 FIXME: one would think we would fold the result, but it causes
3379 problems with the dominator optimizer. */
3380
3381 static tree
3382 fold_truth_not_expr (location_t loc, tree arg)
3383 {
3384 tree type = TREE_TYPE (arg);
3385 enum tree_code code = TREE_CODE (arg);
3386 location_t loc1, loc2;
3387
3388 /* If this is a comparison, we can simply invert it, except for
3389 floating-point non-equality comparisons, in which case we just
3390 enclose a TRUTH_NOT_EXPR around what we have. */
3391
3392 if (TREE_CODE_CLASS (code) == tcc_comparison)
3393 {
3394 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3395 if (FLOAT_TYPE_P (op_type)
3396 && flag_trapping_math
3397 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3398 && code != NE_EXPR && code != EQ_EXPR)
3399 return NULL_TREE;
3400
3401 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3402 if (code == ERROR_MARK)
3403 return NULL_TREE;
3404
3405 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3406 TREE_OPERAND (arg, 1));
3407 }
3408
3409 switch (code)
3410 {
3411 case INTEGER_CST:
3412 return constant_boolean_node (integer_zerop (arg), type);
3413
3414 case TRUTH_AND_EXPR:
3415 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3416 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3417 return build2_loc (loc, TRUTH_OR_EXPR, type,
3418 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3419 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3420
3421 case TRUTH_OR_EXPR:
3422 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3423 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3424 return build2_loc (loc, TRUTH_AND_EXPR, type,
3425 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3426 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3427
3428 case TRUTH_XOR_EXPR:
3429 /* Here we can invert either operand. We invert the first operand
3430 unless the second operand is a TRUTH_NOT_EXPR in which case our
3431 result is the XOR of the first operand with the inside of the
3432 negation of the second operand. */
3433
3434 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3435 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3436 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3437 else
3438 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3439 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3440 TREE_OPERAND (arg, 1));
3441
3442 case TRUTH_ANDIF_EXPR:
3443 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3444 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3445 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3446 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3447 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3448
3449 case TRUTH_ORIF_EXPR:
3450 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3451 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3452 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3453 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3454 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3455
3456 case TRUTH_NOT_EXPR:
3457 return TREE_OPERAND (arg, 0);
3458
3459 case COND_EXPR:
3460 {
3461 tree arg1 = TREE_OPERAND (arg, 1);
3462 tree arg2 = TREE_OPERAND (arg, 2);
3463
3464 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3465 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3466
3467 /* A COND_EXPR may have a throw as one operand, which
3468 then has void type. Just leave void operands
3469 as they are. */
3470 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3471 VOID_TYPE_P (TREE_TYPE (arg1))
3472 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3473 VOID_TYPE_P (TREE_TYPE (arg2))
3474 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3475 }
3476
3477 case COMPOUND_EXPR:
3478 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3479 return build2_loc (loc, COMPOUND_EXPR, type,
3480 TREE_OPERAND (arg, 0),
3481 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3482
3483 case NON_LVALUE_EXPR:
3484 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3485 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3486
3487 CASE_CONVERT:
3488 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3489 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3490
3491 /* ... fall through ... */
3492
3493 case FLOAT_EXPR:
3494 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3495 return build1_loc (loc, TREE_CODE (arg), type,
3496 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3497
3498 case BIT_AND_EXPR:
3499 if (!integer_onep (TREE_OPERAND (arg, 1)))
3500 return NULL_TREE;
3501 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3502
3503 case SAVE_EXPR:
3504 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3505
3506 case CLEANUP_POINT_EXPR:
3507 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3508 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3509 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3510
3511 default:
3512 return NULL_TREE;
3513 }
3514 }
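
/* Illustrative example (not part of GCC): the TRUTH_AND/TRUTH_OR
   cases above are De Morgan's laws applied recursively, while the
   comparison case inverts the comparison code itself; so for
   integers !(a && b) becomes !a || !b, and !(x > y) becomes x <= y
   when NaNs need not be honored.  */

static int
demorgan_example (int a, int b)
{
  return !(a && b) == (!a || !b);   /* always 1 */
}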
3515
3516 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3517 assume that ARG is an operation that returns a truth value (0 or 1
3518 for scalars, 0 or -1 for vectors). Return the folded expression if
3519 folding is successful. Otherwise, return NULL_TREE. */
3520
3521 static tree
3522 fold_invert_truthvalue (location_t loc, tree arg)
3523 {
3524 tree type = TREE_TYPE (arg);
3525 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3526 ? BIT_NOT_EXPR
3527 : TRUTH_NOT_EXPR,
3528 type, arg);
3529 }
3530
3531 /* Return a simplified tree node for the truth-negation of ARG. This
3532 never alters ARG itself. We assume that ARG is an operation that
3533 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3534
3535 tree
3536 invert_truthvalue_loc (location_t loc, tree arg)
3537 {
3538 if (TREE_CODE (arg) == ERROR_MARK)
3539 return arg;
3540
3541 tree type = TREE_TYPE (arg);
3542 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3543 ? BIT_NOT_EXPR
3544 : TRUTH_NOT_EXPR,
3545 type, arg);
3546 }
3547
3548 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3549 operands are another bit-wise operation with a common input. If so,
3550 distribute the bit operations to save an operation and possibly two if
3551 constants are involved. For example, convert
3552 (A | B) & (A | C) into A | (B & C)
3553 Further simplification will occur if B and C are constants.
3554
3555 If this optimization cannot be done, 0 will be returned. */
3556
3557 static tree
3558 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3559 tree arg0, tree arg1)
3560 {
3561 tree common;
3562 tree left, right;
3563
3564 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3565 || TREE_CODE (arg0) == code
3566 || (TREE_CODE (arg0) != BIT_AND_EXPR
3567 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3568 return 0;
3569
3570 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3571 {
3572 common = TREE_OPERAND (arg0, 0);
3573 left = TREE_OPERAND (arg0, 1);
3574 right = TREE_OPERAND (arg1, 1);
3575 }
3576 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3577 {
3578 common = TREE_OPERAND (arg0, 0);
3579 left = TREE_OPERAND (arg0, 1);
3580 right = TREE_OPERAND (arg1, 0);
3581 }
3582 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3583 {
3584 common = TREE_OPERAND (arg0, 1);
3585 left = TREE_OPERAND (arg0, 0);
3586 right = TREE_OPERAND (arg1, 1);
3587 }
3588 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3589 {
3590 common = TREE_OPERAND (arg0, 1);
3591 left = TREE_OPERAND (arg0, 0);
3592 right = TREE_OPERAND (arg1, 0);
3593 }
3594 else
3595 return 0;
3596
3597 common = fold_convert_loc (loc, type, common);
3598 left = fold_convert_loc (loc, type, left);
3599 right = fold_convert_loc (loc, type, right);
3600 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3601 fold_build2_loc (loc, code, type, left, right));
3602 }
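
/* Illustrative example (not part of GCC): (x | 4) & (x | 1)
   distributes to x | (4 & 1), which then folds to x | 0 and finally
   to plain x, saving two operations.  */

static unsigned int
distribute_bit_example (unsigned int x)
{
  return x | (4u & 1u);   /* == (x | 4) & (x | 1) == x */
}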
3603
3604 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3605 with code CODE. This optimization is unsafe. */
3606 static tree
3607 distribute_real_division (location_t loc, enum tree_code code, tree type,
3608 tree arg0, tree arg1)
3609 {
3610 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3611 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3612
3613 /* (A / C) +- (B / C) -> (A +- B) / C. */
3614 if (mul0 == mul1
3615 && operand_equal_p (TREE_OPERAND (arg0, 1),
3616 TREE_OPERAND (arg1, 1), 0))
3617 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3618 fold_build2_loc (loc, code, type,
3619 TREE_OPERAND (arg0, 0),
3620 TREE_OPERAND (arg1, 0)),
3621 TREE_OPERAND (arg0, 1));
3622
3623 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3624 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3625 TREE_OPERAND (arg1, 0), 0)
3626 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3627 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3628 {
3629 REAL_VALUE_TYPE r0, r1;
3630 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3631 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3632 if (!mul0)
3633 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3634 if (!mul1)
3635 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3636 real_arithmetic (&r0, code, &r0, &r1);
3637 return fold_build2_loc (loc, MULT_EXPR, type,
3638 TREE_OPERAND (arg0, 0),
3639 build_real (type, r0));
3640 }
3641
3642 return NULL_TREE;
3643 }
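
/* Illustrative example (not part of GCC, and valid only under unsafe
   math optimizations): a / c + b / c rewrites to (a + b) / c, trading
   a division for an addition; with constant divisors,
   a / 2.0 + a / 4.0 becomes a * (0.5 + 0.25), i.e. a * 0.75.  */

static double
distribute_rdiv_example (double a, double b, double c)
{
  return (a + b) / c;   /* unsafe rewrite of a / c + b / c */
}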
3644 \f
3645 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3646 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3647
3648 static tree
3649 make_bit_field_ref (location_t loc, tree inner, tree type,
3650 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3651 {
3652 tree result, bftype;
3653
3654 if (bitpos == 0)
3655 {
3656 tree size = TYPE_SIZE (TREE_TYPE (inner));
3657 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3658 || POINTER_TYPE_P (TREE_TYPE (inner)))
3659 && tree_fits_shwi_p (size)
3660 && tree_to_shwi (size) == bitsize)
3661 return fold_convert_loc (loc, type, inner);
3662 }
3663
3664 bftype = type;
3665 if (TYPE_PRECISION (bftype) != bitsize
3666 || TYPE_UNSIGNED (bftype) == !unsignedp)
3667 bftype = build_nonstandard_integer_type (bitsize, 0);
3668
3669 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3670 size_int (bitsize), bitsize_int (bitpos));
3671
3672 if (bftype != type)
3673 result = fold_convert_loc (loc, type, result);
3674
3675 return result;
3676 }
3677
3678 /* Optimize a bit-field compare.
3679
3680 There are two cases: First is a compare against a constant and the
3681 second is a comparison of two items where the fields are at the same
3682 bit position relative to the start of a chunk (byte, halfword, word)
3683 large enough to contain it. In these cases we can avoid the shift
3684 implicit in bitfield extractions.
3685
3686 For constants, we emit a compare of the shifted constant with the
3687 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3688 compared. For two fields at the same position, we do the ANDs with the
3689 similar mask and compare the result of the ANDs.
3690
3691 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3692 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3693 are the left and right operands of the comparison, respectively.
3694
3695 If the optimization described above can be done, we return the resulting
3696 tree. Otherwise we return zero. */
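/* A sketch of the constant case, assuming a little-endian target where
   the field B below occupies bits 3..7 of the first byte:

     struct s { unsigned a : 3; unsigned b : 5; } x;
     ... x.b == 5 ...

   becomes, in outline,

     (BIT_FIELD_REF <x, 8, 0> & 0xf8) == (5 << 3)

   i.e. one masked byte compare with no extract-and-shift.  */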
3697
3698 static tree
3699 optimize_bit_field_compare (location_t loc, enum tree_code code,
3700 tree compare_type, tree lhs, tree rhs)
3701 {
3702 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3703 tree type = TREE_TYPE (lhs);
3704 tree unsigned_type;
3705 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3706 machine_mode lmode, rmode, nmode;
3707 int lunsignedp, runsignedp;
3708 int lvolatilep = 0, rvolatilep = 0;
3709 tree linner, rinner = NULL_TREE;
3710 tree mask;
3711 tree offset;
3712
3713 /* Get all the information about the extractions being done. If the bit size
3714 is the same as the size of the underlying object, we aren't doing an
3715 extraction at all and so can do nothing. We also don't want to
3716 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3717 then will no longer be able to replace it. */
3718 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3719 &lunsignedp, &lvolatilep, false);
3720 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3721 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3722 return 0;
3723
3724 if (!const_p)
3725 {
3726 /* If this is not a constant, we can only do something if bit positions,
3727 sizes, and signedness are the same. */
3728 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3729 &runsignedp, &rvolatilep, false);
3730
3731 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3732 || lunsignedp != runsignedp || offset != 0
3733 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3734 return 0;
3735 }
3736
3737 /* See if we can find a mode to refer to this field. We should be able to,
3738 but fail if we can't. */
3739 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3740 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3741 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3742 TYPE_ALIGN (TREE_TYPE (rinner))),
3743 word_mode, false);
3744 if (nmode == VOIDmode)
3745 return 0;
3746
3747 /* Set the unsigned type of the precision of this mode for the
3748 shifts below. */
3749 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3750
3751 /* Compute the bit position and size for the new reference and our offset
3752 within it. If the new reference is the same size as the original, we
3753 won't optimize anything, so return zero. */
3754 nbitsize = GET_MODE_BITSIZE (nmode);
3755 nbitpos = lbitpos & ~ (nbitsize - 1);
3756 lbitpos -= nbitpos;
3757 if (nbitsize == lbitsize)
3758 return 0;
3759
3760 if (BYTES_BIG_ENDIAN)
3761 lbitpos = nbitsize - lbitsize - lbitpos;
3762
3763 /* Make the mask to be used against the extracted field. */
3764 mask = build_int_cst_type (unsigned_type, -1);
3765 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3766 mask = const_binop (RSHIFT_EXPR, mask,
3767 size_int (nbitsize - lbitsize - lbitpos));
3768
3769 if (! const_p)
3770 /* If not comparing with constant, just rework the comparison
3771 and return. */
3772 return fold_build2_loc (loc, code, compare_type,
3773 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3774 make_bit_field_ref (loc, linner,
3775 unsigned_type,
3776 nbitsize, nbitpos,
3777 1),
3778 mask),
3779 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3780 make_bit_field_ref (loc, rinner,
3781 unsigned_type,
3782 nbitsize, nbitpos,
3783 1),
3784 mask));
3785
3786 /* Otherwise, we are handling the constant case. See if the constant is too
3787 big for the field. Warn and return a tree for 0 (false) if so. We do
3788 this not only for its own sake, but to avoid having to test for this
3789 error case below. If we didn't, we might generate wrong code.
3790
3791 For unsigned fields, the constant shifted right by the field length should
3792 be all zero. For signed fields, the high-order bits should agree with
3793 the sign bit. */
3794
3795 if (lunsignedp)
3796 {
3797 if (wi::lrshift (rhs, lbitsize) != 0)
3798 {
3799 warning (0, "comparison is always %d due to width of bit-field",
3800 code == NE_EXPR);
3801 return constant_boolean_node (code == NE_EXPR, compare_type);
3802 }
3803 }
3804 else
3805 {
3806 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3807 if (tem != 0 && tem != -1)
3808 {
3809 warning (0, "comparison is always %d due to width of bit-field",
3810 code == NE_EXPR);
3811 return constant_boolean_node (code == NE_EXPR, compare_type);
3812 }
3813 }
3814
3815 /* Single-bit compares should always be against zero. */
3816 if (lbitsize == 1 && ! integer_zerop (rhs))
3817 {
3818 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3819 rhs = build_int_cst (type, 0);
3820 }
3821
3822 /* Make a new bitfield reference, shift the constant over the
3823 appropriate number of bits, and mask it with the computed mask
3824 (in case this was a signed field), then build the new comparison. */
3825 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3826
3827 rhs = const_binop (BIT_AND_EXPR,
3828 const_binop (LSHIFT_EXPR,
3829 fold_convert_loc (loc, unsigned_type, rhs),
3830 size_int (lbitpos)),
3831 mask);
3832
3833 lhs = build2_loc (loc, code, compare_type,
3834 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3835 return lhs;
3836 }
3837 \f
3838 /* Subroutine for fold_truth_andor_1: decode a field reference.
3839
3840 If EXP is a comparison reference, we return the innermost reference.
3841
3842 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3843 set to the starting bit number.
3844
3845 If the innermost field can be completely contained in a mode-sized
3846 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3847
3848 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3849 otherwise it is not changed.
3850
3851 *PUNSIGNEDP is set to the signedness of the field.
3852
3853 *PMASK is set to the mask used. This is either contained in a
3854 BIT_AND_EXPR or derived from the width of the field.
3855
3856 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3857
3858 Return 0 if this is not a component reference or is one that we can't
3859 do anything with. */
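/* For instance, for EXP of the form
     (unsigned int) s.f & 0xf
   we strip the conversion, record 0xf in *PAND_MASK, find the underlying
   field with get_inner_reference, and return it with *PMASK set to 0xf
   truncated to the field's width (an assumed layout; the exact mask
   depends on the field's size).  */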
3860
3861 static tree
3862 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3863 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3864 int *punsignedp, int *pvolatilep,
3865 tree *pmask, tree *pand_mask)
3866 {
3867 tree outer_type = 0;
3868 tree and_mask = 0;
3869 tree mask, inner, offset;
3870 tree unsigned_type;
3871 unsigned int precision;
3872
3873 /* All the optimizations using this function assume integer fields.
3874 There are problems with FP fields since the type_for_size call
3875 below can fail for, e.g., XFmode. */
3876 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3877 return 0;
3878
3879 /* We are interested in the bare arrangement of bits, so strip everything
3880 that doesn't affect the machine mode. However, record the type of the
3881 outermost expression if it may matter below. */
3882 if (CONVERT_EXPR_P (exp)
3883 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3884 outer_type = TREE_TYPE (exp);
3885 STRIP_NOPS (exp);
3886
3887 if (TREE_CODE (exp) == BIT_AND_EXPR)
3888 {
3889 and_mask = TREE_OPERAND (exp, 1);
3890 exp = TREE_OPERAND (exp, 0);
3891 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3892 if (TREE_CODE (and_mask) != INTEGER_CST)
3893 return 0;
3894 }
3895
3896 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3897 punsignedp, pvolatilep, false);
3898 if ((inner == exp && and_mask == 0)
3899 || *pbitsize < 0 || offset != 0
3900 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3901 return 0;
3902
3903 /* If the number of bits in the reference is the same as the bitsize of
3904 the outer type, then the outer type gives the signedness. Otherwise
3905 (in case of a small bitfield) the signedness is unchanged. */
3906 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3907 *punsignedp = TYPE_UNSIGNED (outer_type);
3908
3909 /* Compute the mask to access the bitfield. */
3910 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3911 precision = TYPE_PRECISION (unsigned_type);
3912
3913 mask = build_int_cst_type (unsigned_type, -1);
3914
3915 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3916 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3917
3918 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3919 if (and_mask != 0)
3920 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3921 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3922
3923 *pmask = mask;
3924 *pand_mask = and_mask;
3925 return inner;
3926 }
3927
3928 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3929 bit positions and the type of MASK is signed. */
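/* E.g. a mask with value 7 satisfies this for SIZE == 3 when its type
   is signed, but is rejected when its type is unsigned.  */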
3930
3931 static int
3932 all_ones_mask_p (const_tree mask, unsigned int size)
3933 {
3934 tree type = TREE_TYPE (mask);
3935 unsigned int precision = TYPE_PRECISION (type);
3936
3937 /* If this function returns true when the type of the mask is
3938 UNSIGNED, then there will be errors. In particular see
3939 gcc.c-torture/execute/990326-1.c. There does not appear to be
3940 any documentation paper trail as to why this is so. But the
3941 pre-wide-int code worked with that restriction and it has been preserved
3942 here. */
3943 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3944 return false;
3945
3946 return wi::mask (size, false, precision) == mask;
3947 }
3948
3949 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3950 represents the sign bit of EXP's type. If EXP represents a sign
3951 or zero extension, also test VAL against the unextended type.
3952 The return value is the (sub)expression whose sign bit is VAL,
3953 or NULL_TREE otherwise. */
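/* For a 32-bit int, only VAL == 0x80000000 qualifies. For
   EXP == (int) c with c a signed char, VAL == 0x80 also qualifies,
   via the recursion on the unextended 8-bit type.  */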
3954
3955 tree
3956 sign_bit_p (tree exp, const_tree val)
3957 {
3958 int width;
3959 tree t;
3960
3961 /* Tree EXP must have an integral type. */
3962 t = TREE_TYPE (exp);
3963 if (! INTEGRAL_TYPE_P (t))
3964 return NULL_TREE;
3965
3966 /* Tree VAL must be an integer constant. */
3967 if (TREE_CODE (val) != INTEGER_CST
3968 || TREE_OVERFLOW (val))
3969 return NULL_TREE;
3970
3971 width = TYPE_PRECISION (t);
3972 if (wi::only_sign_bit_p (val, width))
3973 return exp;
3974
3975 /* Handle extension from a narrower type. */
3976 if (TREE_CODE (exp) == NOP_EXPR
3977 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3978 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3979
3980 return NULL_TREE;
3981 }
3982
3983 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3984 to be evaluated unconditionally. */
3985
3986 static int
3987 simple_operand_p (const_tree exp)
3988 {
3989 /* Strip any conversions that don't change the machine mode. */
3990 STRIP_NOPS (exp);
3991
3992 return (CONSTANT_CLASS_P (exp)
3993 || TREE_CODE (exp) == SSA_NAME
3994 || (DECL_P (exp)
3995 && ! TREE_ADDRESSABLE (exp)
3996 && ! TREE_THIS_VOLATILE (exp)
3997 && ! DECL_NONLOCAL (exp)
3998 /* Don't regard global variables as simple. They may be
3999 allocated in ways unknown to the compiler (shared memory,
4000 #pragma weak, etc). */
4001 && ! TREE_PUBLIC (exp)
4002 && ! DECL_EXTERNAL (exp)
4003 /* Weakrefs are not safe to be read, since they can be NULL.
4004 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4005 have DECL_WEAK flag set. */
4006 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4007 /* Loading a static variable is unduly expensive, but global
4008 registers aren't expensive. */
4009 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4010 }
4011
4012 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4013 to be evaluated unconditionally.
4014 In addition to simple_operand_p, we assume that comparisons, conversions,
4015 and logic-not operations are simple, if their operands are simple, too. */
4016
4017 static bool
4018 simple_operand_p_2 (tree exp)
4019 {
4020 enum tree_code code;
4021
4022 if (TREE_SIDE_EFFECTS (exp)
4023 || tree_could_trap_p (exp))
4024 return false;
4025
4026 while (CONVERT_EXPR_P (exp))
4027 exp = TREE_OPERAND (exp, 0);
4028
4029 code = TREE_CODE (exp);
4030
4031 if (TREE_CODE_CLASS (code) == tcc_comparison)
4032 return (simple_operand_p (TREE_OPERAND (exp, 0))
4033 && simple_operand_p (TREE_OPERAND (exp, 1)));
4034
4035 if (code == TRUTH_NOT_EXPR)
4036 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4037
4038 return simple_operand_p (exp);
4039 }
4040
4041 \f
4042 /* The following functions are subroutines to fold_range_test and allow it to
4043 try to change a logical combination of comparisons into a range test.
4044
4045 For example, both
4046 X == 2 || X == 3 || X == 4 || X == 5
4047 and
4048 X >= 2 && X <= 5
4049 are converted to
4050 (unsigned) (X - 2) <= 3
4051
4052 We describe each set of comparisons as being either inside or outside
4053 a range, using a variable named like IN_P, and then describe the
4054 range with a lower and upper bound. If one of the bounds is omitted,
4055 it represents either the highest or lowest value of the type.
4056
4057 In the comments below, we represent a range by two numbers in brackets
4058 preceded by a "+" to designate being inside that range, or a "-" to
4059 designate being outside that range, so the condition can be inverted by
4060 flipping the prefix. An omitted bound is represented by a "-". For
4061 example, "- [-, 10]" means being outside the range starting at the lowest
4062 possible value and ending at 10, in other words, being greater than 10.
4063 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4064 always false.
4065
4066 We set up things so that the missing bounds are handled in a consistent
4067 manner so neither a missing bound nor "true" and "false" need to be
4068 handled using a special case. */
4069
4070 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4071 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4072 and UPPER1_P are nonzero if the respective argument is an upper bound
4073 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4074 must be specified for a comparison. ARG1 will be converted to ARG0's
4075 type if both are specified. */
4076
4077 static tree
4078 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4079 tree arg1, int upper1_p)
4080 {
4081 tree tem;
4082 int result;
4083 int sgn0, sgn1;
4084
4085 /* If neither arg represents infinity, do the normal operation.
4086 Else, if not a comparison, return infinity. Else handle the special
4087 comparison rules. Note that most of the cases below won't occur, but
4088 are handled for consistency. */
4089
4090 if (arg0 != 0 && arg1 != 0)
4091 {
4092 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4093 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4094 STRIP_NOPS (tem);
4095 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4096 }
4097
4098 if (TREE_CODE_CLASS (code) != tcc_comparison)
4099 return 0;
4100
4101 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4102 for neither. In real maths, we cannot assume open ended ranges are
4103 the same. But, this is computer arithmetic, where numbers are finite.
4104 We can therefore stand in for any missing bound with a value Z
4105 beyond any representable number, which permits
4106 us to treat unbounded ranges as equal. */
4107 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4108 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4109 switch (code)
4110 {
4111 case EQ_EXPR:
4112 result = sgn0 == sgn1;
4113 break;
4114 case NE_EXPR:
4115 result = sgn0 != sgn1;
4116 break;
4117 case LT_EXPR:
4118 result = sgn0 < sgn1;
4119 break;
4120 case LE_EXPR:
4121 result = sgn0 <= sgn1;
4122 break;
4123 case GT_EXPR:
4124 result = sgn0 > sgn1;
4125 break;
4126 case GE_EXPR:
4127 result = sgn0 >= sgn1;
4128 break;
4129 default:
4130 gcc_unreachable ();
4131 }
4132
4133 return constant_boolean_node (result, type);
4134 }
4135 \f
4136 /* Helper routine for make_range. Perform one step for it, return
4137 new expression if the loop should continue or NULL_TREE if it should
4138 stop. */
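/* As a worked example: for a signed x with undefined overflow, one
   step on EXP == x + 10 with the incoming range + [0, 19] yields the
   new range + [-10, 9] and returns ARG0 (i.e. x) so the caller can
   continue refining.  */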
4139
4140 tree
4141 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4142 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4143 bool *strict_overflow_p)
4144 {
4145 tree arg0_type = TREE_TYPE (arg0);
4146 tree n_low, n_high, low = *p_low, high = *p_high;
4147 int in_p = *p_in_p, n_in_p;
4148
4149 switch (code)
4150 {
4151 case TRUTH_NOT_EXPR:
4152 /* We can only do something if the range is testing for zero. */
4153 if (low == NULL_TREE || high == NULL_TREE
4154 || ! integer_zerop (low) || ! integer_zerop (high))
4155 return NULL_TREE;
4156 *p_in_p = ! in_p;
4157 return arg0;
4158
4159 case EQ_EXPR: case NE_EXPR:
4160 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4161 /* We can only do something if the range is testing for zero
4162 and if the second operand is an integer constant. Note that
4163 saying something is "in" the range we make is done by
4164 complementing IN_P, since it is set in the initial case of
4165 being not equal to zero; "out" is leaving it alone. */
4166 if (low == NULL_TREE || high == NULL_TREE
4167 || ! integer_zerop (low) || ! integer_zerop (high)
4168 || TREE_CODE (arg1) != INTEGER_CST)
4169 return NULL_TREE;
4170
4171 switch (code)
4172 {
4173 case NE_EXPR: /* - [c, c] */
4174 low = high = arg1;
4175 break;
4176 case EQ_EXPR: /* + [c, c] */
4177 in_p = ! in_p, low = high = arg1;
4178 break;
4179 case GT_EXPR: /* - [-, c] */
4180 low = 0, high = arg1;
4181 break;
4182 case GE_EXPR: /* + [c, -] */
4183 in_p = ! in_p, low = arg1, high = 0;
4184 break;
4185 case LT_EXPR: /* - [c, -] */
4186 low = arg1, high = 0;
4187 break;
4188 case LE_EXPR: /* + [-, c] */
4189 in_p = ! in_p, low = 0, high = arg1;
4190 break;
4191 default:
4192 gcc_unreachable ();
4193 }
4194
4195 /* If this is an unsigned comparison, we also know that EXP is
4196 greater than or equal to zero. We base the range tests we make
4197 on that fact, so we record it here so we can parse existing
4198 range tests. We test arg0_type since often the return type
4199 of, e.g. EQ_EXPR, is boolean. */
4200 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4201 {
4202 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4203 in_p, low, high, 1,
4204 build_int_cst (arg0_type, 0),
4205 NULL_TREE))
4206 return NULL_TREE;
4207
4208 in_p = n_in_p, low = n_low, high = n_high;
4209
4210 /* If the high bound is missing, but we have a nonzero low
4211 bound, reverse the range so it goes from zero to the low bound
4212 minus 1. */
4213 if (high == 0 && low && ! integer_zerop (low))
4214 {
4215 in_p = ! in_p;
4216 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4217 build_int_cst (TREE_TYPE (low), 1), 0);
4218 low = build_int_cst (arg0_type, 0);
4219 }
4220 }
4221
4222 *p_low = low;
4223 *p_high = high;
4224 *p_in_p = in_p;
4225 return arg0;
4226
4227 case NEGATE_EXPR:
4228 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4229 low and high are non-NULL, then normalize will DTRT. */
4230 if (!TYPE_UNSIGNED (arg0_type)
4231 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4232 {
4233 if (low == NULL_TREE)
4234 low = TYPE_MIN_VALUE (arg0_type);
4235 if (high == NULL_TREE)
4236 high = TYPE_MAX_VALUE (arg0_type);
4237 }
4238
4239 /* (-x) IN [a,b] -> x in [-b, -a] */
4240 n_low = range_binop (MINUS_EXPR, exp_type,
4241 build_int_cst (exp_type, 0),
4242 0, high, 1);
4243 n_high = range_binop (MINUS_EXPR, exp_type,
4244 build_int_cst (exp_type, 0),
4245 0, low, 0);
4246 if (n_high != 0 && TREE_OVERFLOW (n_high))
4247 return NULL_TREE;
4248 goto normalize;
4249
4250 case BIT_NOT_EXPR:
4251 /* ~ X -> -X - 1 */
4252 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4253 build_int_cst (exp_type, 1));
4254
4255 case PLUS_EXPR:
4256 case MINUS_EXPR:
4257 if (TREE_CODE (arg1) != INTEGER_CST)
4258 return NULL_TREE;
4259
4260 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4261 move a constant to the other side. */
4262 if (!TYPE_UNSIGNED (arg0_type)
4263 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4264 return NULL_TREE;
4265
4266 /* If EXP is signed, any overflow in the computation is undefined,
4267 so we don't worry about it so long as our computations on
4268 the bounds don't overflow. For unsigned, overflow is defined
4269 and this is exactly the right thing. */
4270 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4271 arg0_type, low, 0, arg1, 0);
4272 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4273 arg0_type, high, 1, arg1, 0);
4274 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4275 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4276 return NULL_TREE;
4277
4278 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4279 *strict_overflow_p = true;
4280
4281 normalize:
4282 /* Check for an unsigned range which has wrapped around the maximum
4283 value thus making n_high < n_low, and normalize it. */
4284 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4285 {
4286 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4287 build_int_cst (TREE_TYPE (n_high), 1), 0);
4288 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4289 build_int_cst (TREE_TYPE (n_low), 1), 0);
4290
4291 /* If the range is of the form +/- [ x+1, x ], we won't
4292 be able to normalize it. But then, it represents the
4293 whole range or the empty set, so make it
4294 +/- [ -, - ]. */
4295 if (tree_int_cst_equal (n_low, low)
4296 && tree_int_cst_equal (n_high, high))
4297 low = high = 0;
4298 else
4299 in_p = ! in_p;
4300 }
4301 else
4302 low = n_low, high = n_high;
4303
4304 *p_low = low;
4305 *p_high = high;
4306 *p_in_p = in_p;
4307 return arg0;
4308
4309 CASE_CONVERT:
4310 case NON_LVALUE_EXPR:
4311 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4312 return NULL_TREE;
4313
4314 if (! INTEGRAL_TYPE_P (arg0_type)
4315 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4316 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4317 return NULL_TREE;
4318
4319 n_low = low, n_high = high;
4320
4321 if (n_low != 0)
4322 n_low = fold_convert_loc (loc, arg0_type, n_low);
4323
4324 if (n_high != 0)
4325 n_high = fold_convert_loc (loc, arg0_type, n_high);
4326
4327 /* If we're converting arg0 from an unsigned type to exp's
4328 signed type, we will be doing the comparison as unsigned.
4329 The tests above have already verified that LOW and HIGH
4330 are both positive.
4331
4332 So we have to ensure that we will handle large unsigned
4333 values the same way that the current signed bounds treat
4334 negative values. */
4335
4336 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4337 {
4338 tree high_positive;
4339 tree equiv_type;
4340 /* For fixed-point modes, we need to pass the saturating flag
4341 as the 2nd parameter. */
4342 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4343 equiv_type
4344 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4345 TYPE_SATURATING (arg0_type));
4346 else
4347 equiv_type
4348 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4349
4350 /* A range without an upper bound is, naturally, unbounded.
4351 Since convert would have cropped a very large value, use
4352 the max value for the destination type. */
4353 high_positive
4354 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4355 : TYPE_MAX_VALUE (arg0_type);
4356
4357 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4358 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4359 fold_convert_loc (loc, arg0_type,
4360 high_positive),
4361 build_int_cst (arg0_type, 1));
4362
4363 /* If the low bound is specified, "and" the range with the
4364 range for which the original unsigned value will be
4365 positive. */
4366 if (low != 0)
4367 {
4368 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4369 1, fold_convert_loc (loc, arg0_type,
4370 integer_zero_node),
4371 high_positive))
4372 return NULL_TREE;
4373
4374 in_p = (n_in_p == in_p);
4375 }
4376 else
4377 {
4378 /* Otherwise, "or" the range with the range of the input
4379 that will be interpreted as negative. */
4380 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4381 1, fold_convert_loc (loc, arg0_type,
4382 integer_zero_node),
4383 high_positive))
4384 return NULL_TREE;
4385
4386 in_p = (in_p != n_in_p);
4387 }
4388 }
4389
4390 *p_low = n_low;
4391 *p_high = n_high;
4392 *p_in_p = in_p;
4393 return arg0;
4394
4395 default:
4396 return NULL_TREE;
4397 }
4398 }
4399
4400 /* Given EXP, a logical expression, set the range it is testing into
4401 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4402 actually being tested. *PLOW and *PHIGH will be made of the same
4403 type as the returned expression. If EXP is not a comparison, we
4404 will most likely not be returning a useful value and range. Set
4405 *STRICT_OVERFLOW_P to true if the return value is only valid
4406 because signed overflow is undefined; otherwise, do not change
4407 *STRICT_OVERFLOW_P. */
4408
4409 tree
4410 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4411 bool *strict_overflow_p)
4412 {
4413 enum tree_code code;
4414 tree arg0, arg1 = NULL_TREE;
4415 tree exp_type, nexp;
4416 int in_p;
4417 tree low, high;
4418 location_t loc = EXPR_LOCATION (exp);
4419
4420 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4421 and see if we can refine the range. Some of the cases below may not
4422 happen, but it doesn't seem worth worrying about this. We keep
4423 iterating as long as make_range_step refines the range, and stop
4424 as soon as it returns NULL_TREE. */
4425
4426 in_p = 0;
4427 low = high = build_int_cst (TREE_TYPE (exp), 0);
4428
4429 while (1)
4430 {
4431 code = TREE_CODE (exp);
4432 exp_type = TREE_TYPE (exp);
4433 arg0 = NULL_TREE;
4434
4435 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4436 {
4437 if (TREE_OPERAND_LENGTH (exp) > 0)
4438 arg0 = TREE_OPERAND (exp, 0);
4439 if (TREE_CODE_CLASS (code) == tcc_binary
4440 || TREE_CODE_CLASS (code) == tcc_comparison
4441 || (TREE_CODE_CLASS (code) == tcc_expression
4442 && TREE_OPERAND_LENGTH (exp) > 1))
4443 arg1 = TREE_OPERAND (exp, 1);
4444 }
4445 if (arg0 == NULL_TREE)
4446 break;
4447
4448 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4449 &high, &in_p, strict_overflow_p);
4450 if (nexp == NULL_TREE)
4451 break;
4452 exp = nexp;
4453 }
4454
4455 /* If EXP is a constant, we can evaluate whether this is true or false. */
4456 if (TREE_CODE (exp) == INTEGER_CST)
4457 {
4458 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4459 exp, 0, low, 0))
4460 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4461 exp, 1, high, 1)));
4462 low = high = 0;
4463 exp = 0;
4464 }
4465
4466 *pin_p = in_p, *plow = low, *phigh = high;
4467 return exp;
4468 }
4469 \f
4470 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4471 type, TYPE, return an expression to test if EXP is in (or out of, depending
4472 on IN_P) the range. Return 0 if the test couldn't be created. */
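/* E.g. for EXP of unsigned type and the range + [2, 5], this builds
     EXP - 2 <= 3
   entirely in the unsigned type, the same shape as the
     (unsigned) (X - 2) <= 3
   example given earlier.  */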
4473
4474 tree
4475 build_range_check (location_t loc, tree type, tree exp, int in_p,
4476 tree low, tree high)
4477 {
4478 tree etype = TREE_TYPE (exp), value;
4479
4480 /* Disable this optimization for function pointer expressions
4481 on targets that require function pointer canonicalization. */
4482 if (targetm.have_canonicalize_funcptr_for_compare ()
4483 && TREE_CODE (etype) == POINTER_TYPE
4484 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4485 return NULL_TREE;
4486
4487 if (! in_p)
4488 {
4489 value = build_range_check (loc, type, exp, 1, low, high);
4490 if (value != 0)
4491 return invert_truthvalue_loc (loc, value);
4492
4493 return 0;
4494 }
4495
4496 if (low == 0 && high == 0)
4497 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4498
4499 if (low == 0)
4500 return fold_build2_loc (loc, LE_EXPR, type, exp,
4501 fold_convert_loc (loc, etype, high));
4502
4503 if (high == 0)
4504 return fold_build2_loc (loc, GE_EXPR, type, exp,
4505 fold_convert_loc (loc, etype, low));
4506
4507 if (operand_equal_p (low, high, 0))
4508 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4509 fold_convert_loc (loc, etype, low));
4510
4511 if (integer_zerop (low))
4512 {
4513 if (! TYPE_UNSIGNED (etype))
4514 {
4515 etype = unsigned_type_for (etype);
4516 high = fold_convert_loc (loc, etype, high);
4517 exp = fold_convert_loc (loc, etype, exp);
4518 }
4519 return build_range_check (loc, type, exp, 1, 0, high);
4520 }
4521
4522 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4523 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4524 {
4525 int prec = TYPE_PRECISION (etype);
4526
4527 if (wi::mask (prec - 1, false, prec) == high)
4528 {
4529 if (TYPE_UNSIGNED (etype))
4530 {
4531 tree signed_etype = signed_type_for (etype);
4532 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4533 etype
4534 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4535 else
4536 etype = signed_etype;
4537 exp = fold_convert_loc (loc, etype, exp);
4538 }
4539 return fold_build2_loc (loc, GT_EXPR, type, exp,
4540 build_int_cst (etype, 0));
4541 }
4542 }
4543
4544 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4545 This requires wrap-around arithmetic for the type of the expression.
4546 First make sure that arithmetic in this type is valid, then make sure
4547 that it wraps around. */
4548 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4549 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4550 TYPE_UNSIGNED (etype));
4551
4552 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4553 {
4554 tree utype, minv, maxv;
4555
4556 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4557 for the type in question, as we rely on this here. */
4558 utype = unsigned_type_for (etype);
4559 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4560 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4561 build_int_cst (TREE_TYPE (maxv), 1), 1);
4562 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4563
4564 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4565 minv, 1, maxv, 1)))
4566 etype = utype;
4567 else
4568 return 0;
4569 }
4570
4571 high = fold_convert_loc (loc, etype, high);
4572 low = fold_convert_loc (loc, etype, low);
4573 exp = fold_convert_loc (loc, etype, exp);
4574
4575 value = const_binop (MINUS_EXPR, high, low);
4576
4577
4578 if (POINTER_TYPE_P (etype))
4579 {
4580 if (value != 0 && !TREE_OVERFLOW (value))
4581 {
4582 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4583 return build_range_check (loc, type,
4584 fold_build_pointer_plus_loc (loc, exp, low),
4585 1, build_int_cst (etype, 0), value);
4586 }
4587 return 0;
4588 }
4589
4590 if (value != 0 && !TREE_OVERFLOW (value))
4591 return build_range_check (loc, type,
4592 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4593 1, build_int_cst (etype, 0), value);
4594
4595 return 0;
4596 }
4597 \f
4598 /* Return the predecessor of VAL in its type, handling the infinite case. */
4599
4600 static tree
4601 range_predecessor (tree val)
4602 {
4603 tree type = TREE_TYPE (val);
4604
4605 if (INTEGRAL_TYPE_P (type)
4606 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4607 return 0;
4608 else
4609 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4610 build_int_cst (TREE_TYPE (val), 1), 0);
4611 }
4612
4613 /* Return the successor of VAL in its type, handling the infinite case. */
4614
4615 static tree
4616 range_successor (tree val)
4617 {
4618 tree type = TREE_TYPE (val);
4619
4620 if (INTEGRAL_TYPE_P (type)
4621 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4622 return 0;
4623 else
4624 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4625 build_int_cst (TREE_TYPE (val), 1), 0);
4626 }
4627
4628 /* Given two ranges, see if we can merge them into one. Return 1 if we
4629 can, 0 if we can't. Set the output range into the specified parameters. */
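/* For instance, merging + [2, 10] and + [5, 20] (both "in", as for the
   AND of two range tests) yields + [5, 10], while merging the disjoint
   + [2, 5] and + [8, 9] yields - [-, -], i.e. always false.  */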
4630
4631 bool
4632 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4633 tree high0, int in1_p, tree low1, tree high1)
4634 {
4635 int no_overlap;
4636 int subset;
4637 int temp;
4638 tree tem;
4639 int in_p;
4640 tree low, high;
4641 int lowequal = ((low0 == 0 && low1 == 0)
4642 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4643 low0, 0, low1, 0)));
4644 int highequal = ((high0 == 0 && high1 == 0)
4645 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4646 high0, 1, high1, 1)));
4647
4648 /* Make range 0 be the range that starts first, or ends last if they
4649 start at the same value. Swap them if it isn't. */
4650 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4651 low0, 0, low1, 0))
4652 || (lowequal
4653 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4654 high1, 1, high0, 1))))
4655 {
4656 temp = in0_p, in0_p = in1_p, in1_p = temp;
4657 tem = low0, low0 = low1, low1 = tem;
4658 tem = high0, high0 = high1, high1 = tem;
4659 }
4660
4661 /* Now flag two cases, whether the ranges are disjoint or whether the
4662 second range is totally subsumed in the first. Note that the tests
4663 below are simplified by the ones above. */
4664 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4665 high0, 1, low1, 0));
4666 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4667 high1, 1, high0, 1));
4668
4669 /* We now have four cases, depending on whether we are including or
4670 excluding the two ranges. */
4671 if (in0_p && in1_p)
4672 {
4673 /* If they don't overlap, the result is false. If the second range
4674 is a subset it is the result. Otherwise, the range is from the start
4675 of the second to the end of the first. */
4676 if (no_overlap)
4677 in_p = 0, low = high = 0;
4678 else if (subset)
4679 in_p = 1, low = low1, high = high1;
4680 else
4681 in_p = 1, low = low1, high = high0;
4682 }
4683
4684 else if (in0_p && ! in1_p)
4685 {
4686 /* If they don't overlap, the result is the first range. If they are
4687 equal, the result is false. If the second range is a subset of the
4688 first, and the ranges begin at the same place, we go from just after
4689 the end of the second range to the end of the first. If the second
4690 range is not a subset of the first, or if it is a subset and both
4691 ranges end at the same place, the range starts at the start of the
4692 first range and ends just before the second range.
4693 Otherwise, we can't describe this as a single range. */
4694 if (no_overlap)
4695 in_p = 1, low = low0, high = high0;
4696 else if (lowequal && highequal)
4697 in_p = 0, low = high = 0;
4698 else if (subset && lowequal)
4699 {
4700 low = range_successor (high1);
4701 high = high0;
4702 in_p = 1;
4703 if (low == 0)
4704 {
4705 /* We are in the weird situation where high0 > high1 but
4706 high1 has no successor. Punt. */
4707 return 0;
4708 }
4709 }
4710 else if (! subset || highequal)
4711 {
4712 low = low0;
4713 high = range_predecessor (low1);
4714 in_p = 1;
4715 if (high == 0)
4716 {
4717 /* low0 < low1 but low1 has no predecessor. Punt. */
4718 return 0;
4719 }
4720 }
4721 else
4722 return 0;
4723 }
4724
4725 else if (! in0_p && in1_p)
4726 {
4727 /* If they don't overlap, the result is the second range. If the second
4728 is a subset of the first, the result is false. Otherwise,
4729 the range starts just after the first range and ends at the
4730 end of the second. */
4731 if (no_overlap)
4732 in_p = 1, low = low1, high = high1;
4733 else if (subset || highequal)
4734 in_p = 0, low = high = 0;
4735 else
4736 {
4737 low = range_successor (high0);
4738 high = high1;
4739 in_p = 1;
4740 if (low == 0)
4741 {
4742 /* high1 > high0 but high0 has no successor. Punt. */
4743 return 0;
4744 }
4745 }
4746 }
4747
4748 else
4749 {
4750 /* The case where we are excluding both ranges. Here the complex case
4751 is if they don't overlap. In that case, the only time we have a
4752 range is if they are adjacent. If the second is a subset of the
4753 first, the result is the first. Otherwise, the range to exclude
4754 starts at the beginning of the first range and ends at the end of the
4755 second. */
4756 if (no_overlap)
4757 {
4758 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4759 range_successor (high0),
4760 1, low1, 0)))
4761 in_p = 0, low = low0, high = high1;
4762 else
4763 {
4764 /* Canonicalize - [min, x] into - [-, x]. */
4765 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4766 switch (TREE_CODE (TREE_TYPE (low0)))
4767 {
4768 case ENUMERAL_TYPE:
4769 if (TYPE_PRECISION (TREE_TYPE (low0))
4770 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4771 break;
4772 /* FALLTHROUGH */
4773 case INTEGER_TYPE:
4774 if (tree_int_cst_equal (low0,
4775 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4776 low0 = 0;
4777 break;
4778 case POINTER_TYPE:
4779 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4780 && integer_zerop (low0))
4781 low0 = 0;
4782 break;
4783 default:
4784 break;
4785 }
4786
4787 /* Canonicalize - [x, max] into - [x, -]. */
4788 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4789 switch (TREE_CODE (TREE_TYPE (high1)))
4790 {
4791 case ENUMERAL_TYPE:
4792 if (TYPE_PRECISION (TREE_TYPE (high1))
4793 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4794 break;
4795 /* FALLTHROUGH */
4796 case INTEGER_TYPE:
4797 if (tree_int_cst_equal (high1,
4798 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4799 high1 = 0;
4800 break;
4801 case POINTER_TYPE:
4802 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4803 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4804 high1, 1,
4805 build_int_cst (TREE_TYPE (high1), 1),
4806 1)))
4807 high1 = 0;
4808 break;
4809 default:
4810 break;
4811 }
4812
4813 /* The ranges might be also adjacent between the maximum and
4814 minimum values of the given type. For
4815 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4816 return + [x + 1, y - 1]. */
4817 if (low0 == 0 && high1 == 0)
4818 {
4819 low = range_successor (high0);
4820 high = range_predecessor (low1);
4821 if (low == 0 || high == 0)
4822 return 0;
4823
4824 in_p = 1;
4825 }
4826 else
4827 return 0;
4828 }
4829 }
4830 else if (subset)
4831 in_p = 0, low = low0, high = high0;
4832 else
4833 in_p = 0, low = low0, high = high1;
4834 }
4835
4836 *pin_p = in_p, *plow = low, *phigh = high;
4837 return 1;
4838 }
4839 \f
4840
4841 /* Subroutine of fold, looking inside expressions of the form
4842 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4843 of the COND_EXPR. This function is being used also to optimize
4844 A op B ? C : A, by reversing the comparison first.
4845
4846 Return a folded expression whose code is not a COND_EXPR
4847 anymore, or NULL_TREE if no folding opportunity is found. */
4848
4849 static tree
4850 fold_cond_expr_with_comparison (location_t loc, tree type,
4851 tree arg0, tree arg1, tree arg2)
4852 {
4853 enum tree_code comp_code = TREE_CODE (arg0);
4854 tree arg00 = TREE_OPERAND (arg0, 0);
4855 tree arg01 = TREE_OPERAND (arg0, 1);
4856 tree arg1_type = TREE_TYPE (arg1);
4857 tree tem;
4858
4859 STRIP_NOPS (arg1);
4860 STRIP_NOPS (arg2);
4861
4862 /* If we have A op 0 ? A : -A, consider applying the following
4863 transformations:
4864
4865 A == 0? A : -A same as -A
4866 A != 0? A : -A same as A
4867 A >= 0? A : -A same as abs (A)
4868 A > 0? A : -A same as abs (A)
4869 A <= 0? A : -A same as -abs (A)
4870 A < 0? A : -A same as -abs (A)
4871
4872 None of these transformations work for modes with signed
4873 zeros. If A is +/-0, the first two transformations will
4874 change the sign of the result (from +0 to -0, or vice
4875 versa). The last four will fix the sign of the result,
4876 even though the original expressions could be positive or
4877 negative, depending on the sign of A.
4878
4879 Note that all these transformations are correct if A is
4880 NaN, since the two alternatives (A and -A) are also NaNs. */
4881 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4882 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4883 ? real_zerop (arg01)
4884 : integer_zerop (arg01))
4885 && ((TREE_CODE (arg2) == NEGATE_EXPR
4886 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4887 /* In the case that A is of the form X-Y, '-A' (arg2) may
4888 have already been folded to Y-X, check for that. */
4889 || (TREE_CODE (arg1) == MINUS_EXPR
4890 && TREE_CODE (arg2) == MINUS_EXPR
4891 && operand_equal_p (TREE_OPERAND (arg1, 0),
4892 TREE_OPERAND (arg2, 1), 0)
4893 && operand_equal_p (TREE_OPERAND (arg1, 1),
4894 TREE_OPERAND (arg2, 0), 0))))
4895 switch (comp_code)
4896 {
4897 case EQ_EXPR:
4898 case UNEQ_EXPR:
4899 tem = fold_convert_loc (loc, arg1_type, arg1);
4900 return pedantic_non_lvalue_loc (loc,
4901 fold_convert_loc (loc, type,
4902 negate_expr (tem)));
4903 case NE_EXPR:
4904 case LTGT_EXPR:
4905 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4906 case UNGE_EXPR:
4907 case UNGT_EXPR:
4908 if (flag_trapping_math)
4909 break;
4910 /* Fall through. */
4911 case GE_EXPR:
4912 case GT_EXPR:
4913 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4914 arg1 = fold_convert_loc (loc, signed_type_for
4915 (TREE_TYPE (arg1)), arg1);
4916 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4917 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4918 case UNLE_EXPR:
4919 case UNLT_EXPR:
4920 if (flag_trapping_math)
4921 break;
/* Fall through. */
4922 case LE_EXPR:
4923 case LT_EXPR:
4924 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4925 arg1 = fold_convert_loc (loc, signed_type_for
4926 (TREE_TYPE (arg1)), arg1);
4927 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4928 return negate_expr (fold_convert_loc (loc, type, tem));
4929 default:
4930 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4931 break;
4932 }
4933
4934 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4935 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4936 both transformations are correct when A is NaN: A != 0
4937 is then true, and A == 0 is false. */
4938
4939 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4940 && integer_zerop (arg01) && integer_zerop (arg2))
4941 {
4942 if (comp_code == NE_EXPR)
4943 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4944 else if (comp_code == EQ_EXPR)
4945 return build_zero_cst (type);
4946 }
4947
4948 /* Try some transformations of A op B ? A : B.
4949
4950 A == B? A : B same as B
4951 A != B? A : B same as A
4952 A >= B? A : B same as max (A, B)
4953 A > B? A : B same as max (B, A)
4954 A <= B? A : B same as min (A, B)
4955 A < B? A : B same as min (B, A)
4956
4957 As above, these transformations don't work in the presence
4958 of signed zeros. For example, if A and B are zeros of
4959 opposite sign, the first two transformations will change
4960 the sign of the result. In the last four, the original
4961 expressions give different results for (A=+0, B=-0) and
4962 (A=-0, B=+0), but the transformed expressions do not.
4963
4964 The first two transformations are correct if either A or B
4965 is a NaN. In the first transformation, the condition will
4966 be false, and B will indeed be chosen. In the case of the
4967 second transformation, the condition A != B will be true,
4968 and A will be chosen.
4969
4970 The conversions to max() and min() are not correct if B is
4971 a number and A is not. The conditions in the original
4972 expressions will be false, so all four give B. The min()
4973 and max() versions would give a NaN instead. */
4974 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4975 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4976 /* Avoid these transformations if the COND_EXPR may be used
4977 as an lvalue in the C++ front-end. PR c++/19199. */
4978 && (in_gimple_form
4979 || VECTOR_TYPE_P (type)
4980 || (! lang_GNU_CXX ()
4981 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4982 || ! maybe_lvalue_p (arg1)
4983 || ! maybe_lvalue_p (arg2)))
4984 {
4985 tree comp_op0 = arg00;
4986 tree comp_op1 = arg01;
4987 tree comp_type = TREE_TYPE (comp_op0);
4988
4989 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4990 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4991 {
4992 comp_type = type;
4993 comp_op0 = arg1;
4994 comp_op1 = arg2;
4995 }
4996
4997 switch (comp_code)
4998 {
4999 case EQ_EXPR:
5000 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5001 case NE_EXPR:
5002 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5003 case LE_EXPR:
5004 case LT_EXPR:
5005 case UNLE_EXPR:
5006 case UNLT_EXPR:
5007 /* In C++ a ?: expression can be an lvalue, so put the
5008 operand which will be used if they are equal first
5009 so that we can convert this back to the
5010 corresponding COND_EXPR. */
5011 if (!HONOR_NANS (arg1))
5012 {
5013 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5014 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5015 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5016 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5017 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5018 comp_op1, comp_op0);
5019 return pedantic_non_lvalue_loc (loc,
5020 fold_convert_loc (loc, type, tem));
5021 }
5022 break;
5023 case GE_EXPR:
5024 case GT_EXPR:
5025 case UNGE_EXPR:
5026 case UNGT_EXPR:
5027 if (!HONOR_NANS (arg1))
5028 {
5029 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5030 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5031 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5032 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5033 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5034 comp_op1, comp_op0);
5035 return pedantic_non_lvalue_loc (loc,
5036 fold_convert_loc (loc, type, tem));
5037 }
5038 break;
5039 case UNEQ_EXPR:
5040 if (!HONOR_NANS (arg1))
5041 return pedantic_non_lvalue_loc (loc,
5042 fold_convert_loc (loc, type, arg2));
5043 break;
5044 case LTGT_EXPR:
5045 if (!HONOR_NANS (arg1))
5046 return pedantic_non_lvalue_loc (loc,
5047 fold_convert_loc (loc, type, arg1));
5048 break;
5049 default:
5050 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5051 break;
5052 }
5053 }
5054
5055 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5056 we might still be able to simplify this. For example,
5057 if C1 is one less or one more than C2, this might have started
5058 out as a MIN or MAX and been transformed by this function.
5059 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5060
5061 if (INTEGRAL_TYPE_P (type)
5062 && TREE_CODE (arg01) == INTEGER_CST
5063 && TREE_CODE (arg2) == INTEGER_CST)
5064 switch (comp_code)
5065 {
5066 case EQ_EXPR:
5067 if (TREE_CODE (arg1) == INTEGER_CST)
5068 break;
5069 /* We can replace A with C1 in this case. */
5070 arg1 = fold_convert_loc (loc, type, arg01);
5071 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5072
5073 case LT_EXPR:
5074 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5075 MIN_EXPR, to preserve the signedness of the comparison. */
5076 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5077 OEP_ONLY_CONST)
5078 && operand_equal_p (arg01,
5079 const_binop (PLUS_EXPR, arg2,
5080 build_int_cst (type, 1)),
5081 OEP_ONLY_CONST))
5082 {
5083 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5084 fold_convert_loc (loc, TREE_TYPE (arg00),
5085 arg2));
5086 return pedantic_non_lvalue_loc (loc,
5087 fold_convert_loc (loc, type, tem));
5088 }
5089 break;
5090
5091 case LE_EXPR:
5092 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5093 as above. */
5094 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5095 OEP_ONLY_CONST)
5096 && operand_equal_p (arg01,
5097 const_binop (MINUS_EXPR, arg2,
5098 build_int_cst (type, 1)),
5099 OEP_ONLY_CONST))
5100 {
5101 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5102 fold_convert_loc (loc, TREE_TYPE (arg00),
5103 arg2));
5104 return pedantic_non_lvalue_loc (loc,
5105 fold_convert_loc (loc, type, tem));
5106 }
5107 break;
5108
5109 case GT_EXPR:
5110 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5111 MAX_EXPR, to preserve the signedness of the comparison. */
5112 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5113 OEP_ONLY_CONST)
5114 && operand_equal_p (arg01,
5115 const_binop (MINUS_EXPR, arg2,
5116 build_int_cst (type, 1)),
5117 OEP_ONLY_CONST))
5118 {
5119 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5120 fold_convert_loc (loc, TREE_TYPE (arg00),
5121 arg2));
5122 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5123 }
5124 break;
5125
5126 case GE_EXPR:
5127 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5128 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5129 OEP_ONLY_CONST)
5130 && operand_equal_p (arg01,
5131 const_binop (PLUS_EXPR, arg2,
5132 build_int_cst (type, 1)),
5133 OEP_ONLY_CONST))
5134 {
5135 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5136 fold_convert_loc (loc, TREE_TYPE (arg00),
5137 arg2));
5138 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5139 }
5140 break;
5141 case NE_EXPR:
5142 break;
5143 default:
5144 gcc_unreachable ();
5145 }
5146
5147 return NULL_TREE;
5148 }
5149
5150
5151 \f
5152 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5153 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5154 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5155 false) >= 2)
5156 #endif
5157
5158 /* EXP is some logical combination of boolean tests. See if we can
5159 merge it into some range test. Return the new tree if so. */
5160
5161 static tree
5162 fold_range_test (location_t loc, enum tree_code code, tree type,
5163 tree op0, tree op1)
5164 {
5165 int or_op = (code == TRUTH_ORIF_EXPR
5166 || code == TRUTH_OR_EXPR);
5167 int in0_p, in1_p, in_p;
5168 tree low0, low1, low, high0, high1, high;
5169 bool strict_overflow_p = false;
5170 tree tem, lhs, rhs;
5171 const char * const warnmsg = G_("assuming signed overflow does not occur "
5172 "when simplifying range test");
5173
5174 if (!INTEGRAL_TYPE_P (type))
5175 return 0;
5176
5177 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5178 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5179
5180 /* If this is an OR operation, invert both sides; we will invert
5181 again at the end. */
5182 if (or_op)
5183 in0_p = ! in0_p, in1_p = ! in1_p;
5184
5185 /* If both expressions are the same, if we can merge the ranges, and we
5186 can build the range test, return it or it inverted. If one of the
5187 ranges is always true or always false, consider it to be the same
5188 expression as the other. */
5189 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5190 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5191 in1_p, low1, high1)
5192 && 0 != (tem = (build_range_check (loc, type,
5193 lhs != 0 ? lhs
5194 : rhs != 0 ? rhs : integer_zero_node,
5195 in_p, low, high))))
5196 {
5197 if (strict_overflow_p)
5198 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5199 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5200 }
5201
5202 /* On machines where the branch cost is expensive, if this is a
5203 short-circuited branch and the underlying object on both sides
5204 is the same, make a non-short-circuit operation. */
5205 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5206 && lhs != 0 && rhs != 0
5207 && (code == TRUTH_ANDIF_EXPR
5208 || code == TRUTH_ORIF_EXPR)
5209 && operand_equal_p (lhs, rhs, 0))
5210 {
5211 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5212 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5213 which cases we can't do this. */
5214 if (simple_operand_p (lhs))
5215 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5216 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5217 type, op0, op1);
5218
5219 else if (!lang_hooks.decls.global_bindings_p ()
5220 && !CONTAINS_PLACEHOLDER_P (lhs))
5221 {
5222 tree common = save_expr (lhs);
5223
5224 if (0 != (lhs = build_range_check (loc, type, common,
5225 or_op ? ! in0_p : in0_p,
5226 low0, high0))
5227 && (0 != (rhs = build_range_check (loc, type, common,
5228 or_op ? ! in1_p : in1_p,
5229 low1, high1))))
5230 {
5231 if (strict_overflow_p)
5232 fold_overflow_warning (warnmsg,
5233 WARN_STRICT_OVERFLOW_COMPARISON);
5234 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5235 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5236 type, lhs, rhs);
5237 }
5238 }
5239 }
5240
5241 return 0;
5242 }
5243 \f
5244 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5245 P-bit value. Arrange things so the extra bits will be set to zero if and
5246 only if C is sign-extended to its full width. If MASK is nonzero,
5247 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5248
5249 static tree
5250 unextend (tree c, int p, int unsignedp, tree mask)
5251 {
5252 tree type = TREE_TYPE (c);
5253 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5254 tree temp;
5255
5256 if (p == modesize || unsignedp)
5257 return c;
5258
5259 /* We work by getting just the sign bit into the low-order bit, then
5260 into the high-order bit, then sign-extend. We then XOR that value
5261 with C. */
5262 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5263
5264 /* We must use a signed type in order to get an arithmetic right shift.
5265 However, we must also avoid introducing accidental overflows, so that
5266 a subsequent call to integer_zerop will work. Hence we must
5267 do the type conversion here. At this point, the constant is either
5268 zero or one, and the conversion to a signed type can never overflow.
5269 We could get an overflow if this conversion is done anywhere else. */
5270 if (TYPE_UNSIGNED (type))
5271 temp = fold_convert (signed_type_for (type), temp);
5272
5273 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5274 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5275 if (mask != 0)
5276 temp = const_binop (BIT_AND_EXPR, temp,
5277 fold_convert (TREE_TYPE (c), mask));
5278 /* If necessary, convert the type back to match the type of C. */
5279 if (TYPE_UNSIGNED (type))
5280 temp = fold_convert (type, temp);
5281
5282 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5283 }
5284 \f
5285 /* For an expression that has the form
5286 (A && B) || ~B
5287 or
5288 (A || B) && ~B,
5289 we can drop one of the inner expressions and simplify to
5290 A || ~B
5291 or
5292 A && ~B
5293 LOC is the location of the resulting expression. OP is the inner
5294 logical operation, i.e. the left-hand side in the examples above, while CMPOP
5295 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5296 removing a condition that guards another, as in
5297 (A != NULL && A->...) || A == NULL
5298 which we must not transform. If RHS_ONLY is true, only eliminate the
5299 right-most operand of the inner logical operation. */
5300
5301 static tree
5302 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5303 bool rhs_only)
5304 {
5305 tree type = TREE_TYPE (cmpop);
5306 enum tree_code code = TREE_CODE (cmpop);
5307 enum tree_code truthop_code = TREE_CODE (op);
5308 tree lhs = TREE_OPERAND (op, 0);
5309 tree rhs = TREE_OPERAND (op, 1);
5310 tree orig_lhs = lhs, orig_rhs = rhs;
5311 enum tree_code rhs_code = TREE_CODE (rhs);
5312 enum tree_code lhs_code = TREE_CODE (lhs);
5313 enum tree_code inv_code;
5314
5315 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5316 return NULL_TREE;
5317
5318 if (TREE_CODE_CLASS (code) != tcc_comparison)
5319 return NULL_TREE;
5320
5321 if (rhs_code == truthop_code)
5322 {
5323 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5324 if (newrhs != NULL_TREE)
5325 {
5326 rhs = newrhs;
5327 rhs_code = TREE_CODE (rhs);
5328 }
5329 }
5330 if (lhs_code == truthop_code && !rhs_only)
5331 {
5332 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5333 if (newlhs != NULL_TREE)
5334 {
5335 lhs = newlhs;
5336 lhs_code = TREE_CODE (lhs);
5337 }
5338 }
5339
5340 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5341 if (inv_code == rhs_code
5342 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5343 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5344 return lhs;
5345 if (!rhs_only && inv_code == lhs_code
5346 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5347 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5348 return rhs;
5349 if (rhs != orig_rhs || lhs != orig_lhs)
5350 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5351 lhs, rhs);
5352 return NULL_TREE;
5353 }
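/* For illustration (editorial example): given
       (a < 10 || b != 0) && b == 0
   OP is a < 10 || b != 0 and CMPOP is b == 0.  The inner b != 0 is the
   inverse of CMPOP, so LHS is returned and the caller ends up with
       a < 10 && b == 0.  */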
5354
5355 /* Find ways of folding logical expressions of LHS and RHS:
5356 Try to merge two comparisons to the same innermost item.
5357 Look for range tests like "ch >= '0' && ch <= '9'".
5358 Look for combinations of simple terms on machines with expensive branches
5359 and evaluate the RHS unconditionally.
5360
5361 For example, if we have p->a == 2 && p->b == 4 and we can make an
5362 object large enough to span both A and B, we can do this with a comparison
5363 against the object ANDed with a mask.
5364
5365 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5366 operations to do this with one comparison.
5367
5368 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5369 function and the one above.
5370
5371 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5372 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5373
5374 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
5375 two operands.
5376
5377 We return the simplified tree or 0 if no optimization is possible. */
5378
5379 static tree
5380 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5381 tree lhs, tree rhs)
5382 {
5383 /* If this is the "or" of two comparisons, we can do something if
5384 the comparisons are NE_EXPR. If this is the "and", we can do something
5385 if the comparisons are EQ_EXPR. I.e.,
5386 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5387
5388 WANTED_CODE is this operation code. For single bit fields, we can
5389 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5390 comparison for one-bit fields. */
5391
5392 enum tree_code wanted_code;
5393 enum tree_code lcode, rcode;
5394 tree ll_arg, lr_arg, rl_arg, rr_arg;
5395 tree ll_inner, lr_inner, rl_inner, rr_inner;
5396 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5397 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5398 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5399 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5400 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5401 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5402 machine_mode lnmode, rnmode;
5403 tree ll_mask, lr_mask, rl_mask, rr_mask;
5404 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5405 tree l_const, r_const;
5406 tree lntype, rntype, result;
5407 HOST_WIDE_INT first_bit, end_bit;
5408 int volatilep;
5409
5410 /* Start by getting the comparison codes. Fail if anything is volatile.
5411 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5412 it were surrounded with a NE_EXPR. */
5413
5414 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5415 return 0;
5416
5417 lcode = TREE_CODE (lhs);
5418 rcode = TREE_CODE (rhs);
5419
5420 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5421 {
5422 lhs = build2 (NE_EXPR, truth_type, lhs,
5423 build_int_cst (TREE_TYPE (lhs), 0));
5424 lcode = NE_EXPR;
5425 }
5426
5427 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5428 {
5429 rhs = build2 (NE_EXPR, truth_type, rhs,
5430 build_int_cst (TREE_TYPE (rhs), 0));
5431 rcode = NE_EXPR;
5432 }
5433
5434 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5435 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5436 return 0;
5437
5438 ll_arg = TREE_OPERAND (lhs, 0);
5439 lr_arg = TREE_OPERAND (lhs, 1);
5440 rl_arg = TREE_OPERAND (rhs, 0);
5441 rr_arg = TREE_OPERAND (rhs, 1);
5442
5443 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5444 if (simple_operand_p (ll_arg)
5445 && simple_operand_p (lr_arg))
5446 {
5447 if (operand_equal_p (ll_arg, rl_arg, 0)
5448 && operand_equal_p (lr_arg, rr_arg, 0))
5449 {
5450 result = combine_comparisons (loc, code, lcode, rcode,
5451 truth_type, ll_arg, lr_arg);
5452 if (result)
5453 return result;
5454 }
5455 else if (operand_equal_p (ll_arg, rr_arg, 0)
5456 && operand_equal_p (lr_arg, rl_arg, 0))
5457 {
5458 result = combine_comparisons (loc, code, lcode,
5459 swap_tree_comparison (rcode),
5460 truth_type, ll_arg, lr_arg);
5461 if (result)
5462 return result;
5463 }
5464 }
5465
5466 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5467 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5468
5469 /* If the RHS can be evaluated unconditionally and its operands are
5470 simple, it wins to evaluate the RHS unconditionally on machines
5471 with expensive branches. In this case, this isn't a comparison
5472 that can be merged. */
5473
5474 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5475 false) >= 2
5476 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5477 && simple_operand_p (rl_arg)
5478 && simple_operand_p (rr_arg))
5479 {
5480 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5481 if (code == TRUTH_OR_EXPR
5482 && lcode == NE_EXPR && integer_zerop (lr_arg)
5483 && rcode == NE_EXPR && integer_zerop (rr_arg)
5484 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5485 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5486 return build2_loc (loc, NE_EXPR, truth_type,
5487 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5488 ll_arg, rl_arg),
5489 build_int_cst (TREE_TYPE (ll_arg), 0));
5490
5491 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5492 if (code == TRUTH_AND_EXPR
5493 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5494 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5495 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5496 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5497 return build2_loc (loc, EQ_EXPR, truth_type,
5498 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5499 ll_arg, rl_arg),
5500 build_int_cst (TREE_TYPE (ll_arg), 0));
5501 }
5502
5503 /* See if the comparisons can be merged. Then get all the parameters for
5504 each side. */
5505
5506 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5507 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5508 return 0;
5509
5510 volatilep = 0;
5511 ll_inner = decode_field_reference (loc, ll_arg,
5512 &ll_bitsize, &ll_bitpos, &ll_mode,
5513 &ll_unsignedp, &volatilep, &ll_mask,
5514 &ll_and_mask);
5515 lr_inner = decode_field_reference (loc, lr_arg,
5516 &lr_bitsize, &lr_bitpos, &lr_mode,
5517 &lr_unsignedp, &volatilep, &lr_mask,
5518 &lr_and_mask);
5519 rl_inner = decode_field_reference (loc, rl_arg,
5520 &rl_bitsize, &rl_bitpos, &rl_mode,
5521 &rl_unsignedp, &volatilep, &rl_mask,
5522 &rl_and_mask);
5523 rr_inner = decode_field_reference (loc, rr_arg,
5524 &rr_bitsize, &rr_bitpos, &rr_mode,
5525 &rr_unsignedp, &volatilep, &rr_mask,
5526 &rr_and_mask);
5527
5528 /* The inner operation on the lhs of each comparison must be the same
5529 if we are to be able to do anything.
5530 Then see if we have constants. If not, the same must be true for
5531 the rhs's. */
5532 if (volatilep || ll_inner == 0 || rl_inner == 0
5533 || ! operand_equal_p (ll_inner, rl_inner, 0))
5534 return 0;
5535
5536 if (TREE_CODE (lr_arg) == INTEGER_CST
5537 && TREE_CODE (rr_arg) == INTEGER_CST)
5538 l_const = lr_arg, r_const = rr_arg;
5539 else if (lr_inner == 0 || rr_inner == 0
5540 || ! operand_equal_p (lr_inner, rr_inner, 0))
5541 return 0;
5542 else
5543 l_const = r_const = 0;
5544
5545 /* If either comparison code is not correct for our logical operation,
5546 fail. However, we can convert a one-bit comparison against zero into
5547 the opposite comparison against that bit being set in the field. */
5548
5549 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5550 if (lcode != wanted_code)
5551 {
5552 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5553 {
5554 /* Make the left operand unsigned, since we are only interested
5555 in the value of one bit. Otherwise we are doing the wrong
5556 thing below. */
5557 ll_unsignedp = 1;
5558 l_const = ll_mask;
5559 }
5560 else
5561 return 0;
5562 }
5563
5564 /* This is analogous to the code for l_const above. */
5565 if (rcode != wanted_code)
5566 {
5567 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5568 {
5569 rl_unsignedp = 1;
5570 r_const = rl_mask;
5571 }
5572 else
5573 return 0;
5574 }
5575
5576 /* See if we can find a mode that contains both fields being compared on
5577 the left. If we can't, fail. Otherwise, update all constants and masks
5578 to be relative to a field of that size. */
5579 first_bit = MIN (ll_bitpos, rl_bitpos);
5580 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5581 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5582 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5583 volatilep);
5584 if (lnmode == VOIDmode)
5585 return 0;
5586
5587 lnbitsize = GET_MODE_BITSIZE (lnmode);
5588 lnbitpos = first_bit & ~ (lnbitsize - 1);
5589 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5590 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5591
5592 if (BYTES_BIG_ENDIAN)
5593 {
5594 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5595 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5596 }
5597
5598 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5599 size_int (xll_bitpos));
5600 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5601 size_int (xrl_bitpos));
5602
5603 if (l_const)
5604 {
5605 l_const = fold_convert_loc (loc, lntype, l_const);
5606 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5607 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5608 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5609 fold_build1_loc (loc, BIT_NOT_EXPR,
5610 lntype, ll_mask))))
5611 {
5612 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5613
5614 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5615 }
5616 }
5617 if (r_const)
5618 {
5619 r_const = fold_convert_loc (loc, lntype, r_const);
5620 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5621 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5622 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5623 fold_build1_loc (loc, BIT_NOT_EXPR,
5624 lntype, rl_mask))))
5625 {
5626 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5627
5628 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5629 }
5630 }
5631
5632 /* If the right sides are not constant, do the same for them. Also,
5633 disallow this optimization if a size or signedness mismatch occurs
5634 between the left and right sides. */
5635 if (l_const == 0)
5636 {
5637 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5638 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5639 /* Make sure the two fields on the right
5640 correspond to the left without being swapped. */
5641 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5642 return 0;
5643
5644 first_bit = MIN (lr_bitpos, rr_bitpos);
5645 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5646 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5647 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5648 volatilep);
5649 if (rnmode == VOIDmode)
5650 return 0;
5651
5652 rnbitsize = GET_MODE_BITSIZE (rnmode);
5653 rnbitpos = first_bit & ~ (rnbitsize - 1);
5654 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5655 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5656
5657 if (BYTES_BIG_ENDIAN)
5658 {
5659 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5660 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5661 }
5662
5663 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5664 rntype, lr_mask),
5665 size_int (xlr_bitpos));
5666 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5667 rntype, rr_mask),
5668 size_int (xrr_bitpos));
5669
5670 /* Make a mask that corresponds to both fields being compared.
5671 Do this for both items being compared. If the operands are the
5672 same size and the bits being compared are in the same position
5673 then we can do this by masking both and comparing the masked
5674 results. */
5675 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5676 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5677 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5678 {
5679 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5680 ll_unsignedp || rl_unsignedp);
5681 if (! all_ones_mask_p (ll_mask, lnbitsize))
5682 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5683
5684 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5685 lr_unsignedp || rr_unsignedp);
5686 if (! all_ones_mask_p (lr_mask, rnbitsize))
5687 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5688
5689 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5690 }
5691
5692 /* There is still another way we can do something: If both pairs of
5693 fields being compared are adjacent, we may be able to make a wider
5694 field containing them both.
5695
5696 Note that we still must mask the lhs/rhs expressions. Furthermore,
5697 the mask must be shifted to account for the shift done by
5698 make_bit_field_ref. */
5699 if ((ll_bitsize + ll_bitpos == rl_bitpos
5700 && lr_bitsize + lr_bitpos == rr_bitpos)
5701 || (ll_bitpos == rl_bitpos + rl_bitsize
5702 && lr_bitpos == rr_bitpos + rr_bitsize))
5703 {
5704 tree type;
5705
5706 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5707 ll_bitsize + rl_bitsize,
5708 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5709 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5710 lr_bitsize + rr_bitsize,
5711 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5712
5713 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5714 size_int (MIN (xll_bitpos, xrl_bitpos)));
5715 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5716 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5717
5718 /* Convert to the smaller type before masking out unwanted bits. */
5719 type = lntype;
5720 if (lntype != rntype)
5721 {
5722 if (lnbitsize > rnbitsize)
5723 {
5724 lhs = fold_convert_loc (loc, rntype, lhs);
5725 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5726 type = rntype;
5727 }
5728 else if (lnbitsize < rnbitsize)
5729 {
5730 rhs = fold_convert_loc (loc, lntype, rhs);
5731 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5732 type = lntype;
5733 }
5734 }
5735
5736 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5737 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5738
5739 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5740 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5741
5742 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5743 }
5744
5745 return 0;
5746 }
5747
5748 /* Handle the case of comparisons with constants. If there is something in
5749 common between the masks, those bits of the constants must be the same.
5750 If not, the whole condition is constant (always false for AND, always
5751 true for OR). Test for this to avoid generating incorrect code below. */
5752 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5753 if (! integer_zerop (result)
5754 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5755 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5756 {
5757 if (wanted_code == NE_EXPR)
5758 {
5759 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5760 return constant_boolean_node (true, truth_type);
5761 }
5762 else
5763 {
5764 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5765 return constant_boolean_node (false, truth_type);
5766 }
5767 }
5768
5769 /* Construct the expression we will return. First get the component
5770 reference we will make. Unless the mask is all ones the width of
5771 that field, perform the mask operation. Then compare with the
5772 merged constant. */
5773 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5774 ll_unsignedp || rl_unsignedp);
5775
5776 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5777 if (! all_ones_mask_p (ll_mask, lnbitsize))
5778 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5779
5780 return build2_loc (loc, wanted_code, truth_type, result,
5781 const_binop (BIT_IOR_EXPR, l_const, r_const));
5782 }
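/* For illustration (editorial example; the exact masks and constants are
   target-dependent): given
       struct s { unsigned a : 4; unsigned b : 4; } *p;
   the test p->a == 2 && p->b == 4 can be merged by the code above into a
   single byte load, mask and compare, conceptually
       (*(unsigned char *) p & 0xff) == 0x42
   on a little-endian target, with the mask dropped entirely because it
   covers the whole byte.  */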
5783 \f
5784 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5785 constant. */
5786
5787 static tree
5788 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5789 tree op0, tree op1)
5790 {
5791 tree arg0 = op0;
5792 enum tree_code op_code;
5793 tree comp_const;
5794 tree minmax_const;
5795 int consts_equal, consts_lt;
5796 tree inner;
5797
5798 STRIP_SIGN_NOPS (arg0);
5799
5800 op_code = TREE_CODE (arg0);
5801 minmax_const = TREE_OPERAND (arg0, 1);
5802 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5803 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5804 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5805 inner = TREE_OPERAND (arg0, 0);
5806
5807 /* If something does not permit us to optimize, return the original tree. */
5808 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5809 || TREE_CODE (comp_const) != INTEGER_CST
5810 || TREE_OVERFLOW (comp_const)
5811 || TREE_CODE (minmax_const) != INTEGER_CST
5812 || TREE_OVERFLOW (minmax_const))
5813 return NULL_TREE;
5814
5815 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5816 and GT_EXPR, doing the rest with recursive calls using logical
5817 simplifications. */
5818 switch (code)
5819 {
5820 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5821 {
5822 tree tem
5823 = optimize_minmax_comparison (loc,
5824 invert_tree_comparison (code, false),
5825 type, op0, op1);
5826 if (tem)
5827 return invert_truthvalue_loc (loc, tem);
5828 return NULL_TREE;
5829 }
5830
5831 case GE_EXPR:
5832 return
5833 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5834 optimize_minmax_comparison
5835 (loc, EQ_EXPR, type, arg0, comp_const),
5836 optimize_minmax_comparison
5837 (loc, GT_EXPR, type, arg0, comp_const));
5838
5839 case EQ_EXPR:
5840 if (op_code == MAX_EXPR && consts_equal)
5841 /* MAX (X, 0) == 0 -> X <= 0 */
5842 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5843
5844 else if (op_code == MAX_EXPR && consts_lt)
5845 /* MAX (X, 0) == 5 -> X == 5 */
5846 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5847
5848 else if (op_code == MAX_EXPR)
5849 /* MAX (X, 0) == -1 -> false */
5850 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5851
5852 else if (consts_equal)
5853 /* MIN (X, 0) == 0 -> X >= 0 */
5854 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5855
5856 else if (consts_lt)
5857 /* MIN (X, 0) == 5 -> false */
5858 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5859
5860 else
5861 /* MIN (X, 0) == -1 -> X == -1 */
5862 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5863
5864 case GT_EXPR:
5865 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5866 /* MAX (X, 0) > 0 -> X > 0
5867 MAX (X, 0) > 5 -> X > 5 */
5868 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5869
5870 else if (op_code == MAX_EXPR)
5871 /* MAX (X, 0) > -1 -> true */
5872 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5873
5874 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5875 /* MIN (X, 0) > 0 -> false
5876 MIN (X, 0) > 5 -> false */
5877 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5878
5879 else
5880 /* MIN (X, 0) > -1 -> X > -1 */
5881 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5882
5883 default:
5884 return NULL_TREE;
5885 }
5886 }
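/* For illustration (editorial example): MAX (X, 4) >= 4 is decomposed by
   the GE_EXPR case above into MAX (X, 4) == 4 || MAX (X, 4) > 4, which the
   EQ_EXPR and GT_EXPR cases then fold to X <= 4 || X > 4.  */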
5887 \f
5888 /* T is an integer expression that is being multiplied, divided, or taken a
5889 modulus (CODE says which and what kind of divide or modulus) by a
5890 constant C. See if we can eliminate that operation by folding it with
5891 other operations already in T. WIDE_TYPE, if non-null, is a type that
5892 should be used for the computation if wider than our type.
5893
5894 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5895 (X * 2) + (Y * 4). We must, however, be assured that either the original
5896 expression would not overflow or that overflow is undefined for the type
5897 in the language in question.
5898
5899 If we return a non-null expression, it is an equivalent form of the
5900 original computation, but need not be in the original type.
5901
5902 We set *STRICT_OVERFLOW_P to true if the return value depends on
5903 signed overflow being undefined. Otherwise we do not change
5904 *STRICT_OVERFLOW_P. */
5905
5906 static tree
5907 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5908 bool *strict_overflow_p)
5909 {
5910 /* To avoid exponential search depth, refuse to allow recursion past
5911 three levels. Beyond that (1) it's highly unlikely that we'll find
5912 something interesting and (2) we've probably processed it before
5913 when we built the inner expression. */
5914
5915 static int depth;
5916 tree ret;
5917
5918 if (depth > 3)
5919 return NULL;
5920
5921 depth++;
5922 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5923 depth--;
5924
5925 return ret;
5926 }
5927
5928 static tree
5929 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5930 bool *strict_overflow_p)
5931 {
5932 tree type = TREE_TYPE (t);
5933 enum tree_code tcode = TREE_CODE (t);
5934 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5935 > GET_MODE_SIZE (TYPE_MODE (type)))
5936 ? wide_type : type);
5937 tree t1, t2;
5938 int same_p = tcode == code;
5939 tree op0 = NULL_TREE, op1 = NULL_TREE;
5940 bool sub_strict_overflow_p;
5941
5942 /* Don't deal with constants of zero here; they confuse the code below. */
5943 if (integer_zerop (c))
5944 return NULL_TREE;
5945
5946 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5947 op0 = TREE_OPERAND (t, 0);
5948
5949 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5950 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5951
5952 /* Note that we need not handle conditional operations here since fold
5953 already handles those cases. So just do arithmetic here. */
5954 switch (tcode)
5955 {
5956 case INTEGER_CST:
5957 /* For a constant, we can always simplify if we are a multiply
5958 or (for divide and modulus) if it is a multiple of our constant. */
5959 if (code == MULT_EXPR
5960 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5961 return const_binop (code, fold_convert (ctype, t),
5962 fold_convert (ctype, c));
5963 break;
5964
5965 CASE_CONVERT: case NON_LVALUE_EXPR:
5966 /* If op0 is an expression ... */
5967 if ((COMPARISON_CLASS_P (op0)
5968 || UNARY_CLASS_P (op0)
5969 || BINARY_CLASS_P (op0)
5970 || VL_EXP_CLASS_P (op0)
5971 || EXPRESSION_CLASS_P (op0))
5972 /* ... and has wrapping overflow, and its type is smaller
5973 than ctype, then we cannot pass through as widening. */
5974 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5975 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5976 && (TYPE_PRECISION (ctype)
5977 > TYPE_PRECISION (TREE_TYPE (op0))))
5978 /* ... or this is a truncation (t is narrower than op0),
5979 then we cannot pass through this narrowing. */
5980 || (TYPE_PRECISION (type)
5981 < TYPE_PRECISION (TREE_TYPE (op0)))
5982 /* ... or signedness changes for division or modulus,
5983 then we cannot pass through this conversion. */
5984 || (code != MULT_EXPR
5985 && (TYPE_UNSIGNED (ctype)
5986 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5987 /* ... or has undefined overflow while the converted to
5988 type has not, we cannot do the operation in the inner type
5989 as that would introduce undefined overflow. */
5990 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5991 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
5992 && !TYPE_OVERFLOW_UNDEFINED (type))))
5993 break;
5994
5995 /* Pass the constant down and see if we can make a simplification. If
5996 we can, replace this expression with the inner simplification for
5997 possible later conversion to our or some other type. */
5998 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5999 && TREE_CODE (t2) == INTEGER_CST
6000 && !TREE_OVERFLOW (t2)
6001 && (0 != (t1 = extract_muldiv (op0, t2, code,
6002 code == MULT_EXPR
6003 ? ctype : NULL_TREE,
6004 strict_overflow_p))))
6005 return t1;
6006 break;
6007
6008 case ABS_EXPR:
6009 /* If widening the type changes it from signed to unsigned, then we
6010 must avoid building ABS_EXPR itself as unsigned. */
6011 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6012 {
6013 tree cstype = (*signed_type_for) (ctype);
6014 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6015 != 0)
6016 {
6017 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6018 return fold_convert (ctype, t1);
6019 }
6020 break;
6021 }
6022 /* If the constant is negative, we cannot simplify this. */
6023 if (tree_int_cst_sgn (c) == -1)
6024 break;
6025 /* FALLTHROUGH */
6026 case NEGATE_EXPR:
6027 /* For division and modulus, type can't be unsigned, as e.g.
6028 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6029 For signed types, even with wrapping overflow, this is fine. */
6030 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6031 break;
6032 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6033 != 0)
6034 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6035 break;
6036
6037 case MIN_EXPR: case MAX_EXPR:
6038 /* If widening the type changes the signedness, then we can't perform
6039 this optimization as that changes the result. */
6040 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6041 break;
6042
6043 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6044 sub_strict_overflow_p = false;
6045 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6046 &sub_strict_overflow_p)) != 0
6047 && (t2 = extract_muldiv (op1, c, code, wide_type,
6048 &sub_strict_overflow_p)) != 0)
6049 {
6050 if (tree_int_cst_sgn (c) < 0)
6051 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6052 if (sub_strict_overflow_p)
6053 *strict_overflow_p = true;
6054 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6055 fold_convert (ctype, t2));
6056 }
6057 break;
6058
6059 case LSHIFT_EXPR: case RSHIFT_EXPR:
6060 /* If the second operand is constant, this is a multiplication
6061 or floor division, by a power of two, so we can treat it that
6062 way unless the multiplier or divisor overflows. Signed
6063 left-shift overflow is implementation-defined rather than
6064 undefined in C90, so do not convert signed left shift into
6065 multiplication. */
6066 if (TREE_CODE (op1) == INTEGER_CST
6067 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6068 /* const_binop may not detect overflow correctly,
6069 so check for it explicitly here. */
6070 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6071 && 0 != (t1 = fold_convert (ctype,
6072 const_binop (LSHIFT_EXPR,
6073 size_one_node,
6074 op1)))
6075 && !TREE_OVERFLOW (t1))
6076 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6077 ? MULT_EXPR : FLOOR_DIV_EXPR,
6078 ctype,
6079 fold_convert (ctype, op0),
6080 t1),
6081 c, code, wide_type, strict_overflow_p);
6082 break;
6083
6084 case PLUS_EXPR: case MINUS_EXPR:
6085 /* See if we can eliminate the operation on both sides. If we can, we
6086 can return a new PLUS or MINUS. If we can't, the only remaining
6087 cases where we can do anything are if the second operand is a
6088 constant. */
6089 sub_strict_overflow_p = false;
6090 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6091 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6092 if (t1 != 0 && t2 != 0
6093 && (code == MULT_EXPR
6094 /* If not multiplication, we can only do this if both operands
6095 are divisible by c. */
6096 || (multiple_of_p (ctype, op0, c)
6097 && multiple_of_p (ctype, op1, c))))
6098 {
6099 if (sub_strict_overflow_p)
6100 *strict_overflow_p = true;
6101 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6102 fold_convert (ctype, t2));
6103 }
6104
6105 /* If this was a subtraction, negate OP1 and set it to be an addition.
6106 This simplifies the logic below. */
6107 if (tcode == MINUS_EXPR)
6108 {
6109 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6110 /* If OP1 was not easily negatable, the constant may be OP0. */
6111 if (TREE_CODE (op0) == INTEGER_CST)
6112 {
6113 std::swap (op0, op1);
6114 std::swap (t1, t2);
6115 }
6116 }
6117
6118 if (TREE_CODE (op1) != INTEGER_CST)
6119 break;
6120
6121 /* If either OP1 or C is negative, this optimization is not safe for
6122 some of the division and remainder types while for others we need
6123 to change the code. */
6124 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6125 {
6126 if (code == CEIL_DIV_EXPR)
6127 code = FLOOR_DIV_EXPR;
6128 else if (code == FLOOR_DIV_EXPR)
6129 code = CEIL_DIV_EXPR;
6130 else if (code != MULT_EXPR
6131 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6132 break;
6133 }
6134
6135 /* If it's a multiply or a division/modulus operation of a multiple
6136 of our constant, do the operation and verify it doesn't overflow. */
6137 if (code == MULT_EXPR
6138 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6139 {
6140 op1 = const_binop (code, fold_convert (ctype, op1),
6141 fold_convert (ctype, c));
6142 /* We allow the constant to overflow with wrapping semantics. */
6143 if (op1 == 0
6144 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6145 break;
6146 }
6147 else
6148 break;
6149
6150 /* If we have an unsigned type, we cannot widen the operation since it
6151 will change the result if the original computation overflowed. */
6152 if (TYPE_UNSIGNED (ctype) && ctype != type)
6153 break;
6154
6155 /* If we were able to eliminate our operation from the first side,
6156 apply our operation to the second side and reform the PLUS. */
6157 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6158 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6159
6160 /* The last case is if we are a multiply. In that case, we can
6161 apply the distributive law to commute the multiply and addition
6162 if the multiplication of the constants doesn't overflow
6163 and overflow is defined. With undefined overflow
6164 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6165 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6166 return fold_build2 (tcode, ctype,
6167 fold_build2 (code, ctype,
6168 fold_convert (ctype, op0),
6169 fold_convert (ctype, c)),
6170 op1);
6171
6172 break;
6173
6174 case MULT_EXPR:
6175 /* We have a special case here if we are doing something like
6176 (C * 8) % 4 since we know that's zero. */
6177 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6178 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6179 /* If the multiplication can overflow we cannot optimize this. */
6180 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6181 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6182 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6183 {
6184 *strict_overflow_p = true;
6185 return omit_one_operand (type, integer_zero_node, op0);
6186 }
6187
6188 /* ... fall through ... */
6189
6190 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6191 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6192 /* If we can extract our operation from the LHS, do so and return a
6193 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6194 do something only if the second operand is a constant. */
6195 if (same_p
6196 && (t1 = extract_muldiv (op0, c, code, wide_type,
6197 strict_overflow_p)) != 0)
6198 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6199 fold_convert (ctype, op1));
6200 else if (tcode == MULT_EXPR && code == MULT_EXPR
6201 && (t1 = extract_muldiv (op1, c, code, wide_type,
6202 strict_overflow_p)) != 0)
6203 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6204 fold_convert (ctype, t1));
6205 else if (TREE_CODE (op1) != INTEGER_CST)
6206 return 0;
6207
6208 /* If these are the same operation types, we can associate them
6209 assuming no overflow. */
6210 if (tcode == code)
6211 {
6212 bool overflow_p = false;
6213 bool overflow_mul_p;
6214 signop sign = TYPE_SIGN (ctype);
6215 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6216 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6217 if (overflow_mul_p
6218 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6219 overflow_p = true;
6220 if (!overflow_p)
6221 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6222 wide_int_to_tree (ctype, mul));
6223 }
6224
6225 /* If these operations "cancel" each other, we have the main
6226 optimizations of this pass, which occur when either constant is a
6227 multiple of the other, in which case we replace this with a single
6228 operation of CODE or TCODE.
6229
6230 If we have an unsigned type, we cannot do this since it will change
6231 the result if the original computation overflowed. */
6232 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6233 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6234 || (tcode == MULT_EXPR
6235 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6236 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6237 && code != MULT_EXPR)))
6238 {
6239 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6240 {
6241 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6242 *strict_overflow_p = true;
6243 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6244 fold_convert (ctype,
6245 const_binop (TRUNC_DIV_EXPR,
6246 op1, c)));
6247 }
6248 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6249 {
6250 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6251 *strict_overflow_p = true;
6252 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6253 fold_convert (ctype,
6254 const_binop (TRUNC_DIV_EXPR,
6255 c, op1)));
6256 }
6257 }
6258 break;
6259
6260 default:
6261 break;
6262 }
6263
6264 return 0;
6265 }
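/* For illustration (editorial example): for T == X * 8 + Y * 16, C == 4
   and CODE == TRUNC_DIV_EXPR, the PLUS_EXPR case above divides each
   multiplication through by the constant, yielding X * 2 + Y * 4,
   provided the overflow rules of the type permit it.  */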
6266 \f
6267 /* Return a node which has the indicated constant VALUE (either 0 or
6268 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6269 and is of the indicated TYPE. */
6270
6271 tree
6272 constant_boolean_node (bool value, tree type)
6273 {
6274 if (type == integer_type_node)
6275 return value ? integer_one_node : integer_zero_node;
6276 else if (type == boolean_type_node)
6277 return value ? boolean_true_node : boolean_false_node;
6278 else if (TREE_CODE (type) == VECTOR_TYPE)
6279 return build_vector_from_val (type,
6280 build_int_cst (TREE_TYPE (type),
6281 value ? -1 : 0));
6282 else
6283 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6284 }
6285
6286
6287 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6288 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6289 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6290 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6291 COND is the first argument to CODE; otherwise (as in the example
6292 given here), it is the second argument. TYPE is the type of the
6293 original expression. Return NULL_TREE if no simplification is
6294 possible. */
6295
6296 static tree
6297 fold_binary_op_with_conditional_arg (location_t loc,
6298 enum tree_code code,
6299 tree type, tree op0, tree op1,
6300 tree cond, tree arg, int cond_first_p)
6301 {
6302 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6303 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6304 tree test, true_value, false_value;
6305 tree lhs = NULL_TREE;
6306 tree rhs = NULL_TREE;
6307 enum tree_code cond_code = COND_EXPR;
6308
6309 if (TREE_CODE (cond) == COND_EXPR
6310 || TREE_CODE (cond) == VEC_COND_EXPR)
6311 {
6312 test = TREE_OPERAND (cond, 0);
6313 true_value = TREE_OPERAND (cond, 1);
6314 false_value = TREE_OPERAND (cond, 2);
6315 /* If this operand is an expression that throws (and therefore has
6316 void type), it does not make sense to try to perform a logical or
6317 arithmetic operation involving it. */
6318 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6319 lhs = true_value;
6320 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6321 rhs = false_value;
6322 }
6323 else
6324 {
6325 tree testtype = TREE_TYPE (cond);
6326 test = cond;
6327 true_value = constant_boolean_node (true, testtype);
6328 false_value = constant_boolean_node (false, testtype);
6329 }
6330
6331 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6332 cond_code = VEC_COND_EXPR;
6333
6334 /* This transformation is only worthwhile if we don't have to wrap ARG
6335 in a SAVE_EXPR and the operation can be simplified without recursing
6336 on at least one of the branches once it's pushed inside the COND_EXPR. */
6337 if (!TREE_CONSTANT (arg)
6338 && (TREE_SIDE_EFFECTS (arg)
6339 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6340 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6341 return NULL_TREE;
6342
6343 arg = fold_convert_loc (loc, arg_type, arg);
6344 if (lhs == 0)
6345 {
6346 true_value = fold_convert_loc (loc, cond_type, true_value);
6347 if (cond_first_p)
6348 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6349 else
6350 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6351 }
6352 if (rhs == 0)
6353 {
6354 false_value = fold_convert_loc (loc, cond_type, false_value);
6355 if (cond_first_p)
6356 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6357 else
6358 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6359 }
6360
6361 /* Check that we have simplified at least one of the branches. */
6362 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6363 return NULL_TREE;
6364
6365 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6366 }
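/* For illustration (editorial example): with constant A, A + (b ? x : y)
   becomes b ? A + x : A + y, since A folds into both arms.  For a
   non-constant A the guards above bail out unless at least one branch
   still simplifies, because A would otherwise need to be wrapped in a
   SAVE_EXPR and the expression would only grow.  */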
6367
6368 \f
6369 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6370
6371 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6372 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6373 ADDEND is the same as X.
6374
6375 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6376 and finite. The problematic cases are when X is zero, and its mode
6377 has signed zeros. In the case of rounding towards -infinity,
6378 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6379 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6380
6381 bool
6382 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6383 {
6384 if (!real_zerop (addend))
6385 return false;
6386
6387 /* Don't allow the fold with -fsignaling-nans. */
6388 if (HONOR_SNANS (element_mode (type)))
6389 return false;
6390
6391 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6392 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6393 return true;
6394
6395 /* In a vector or complex, we would need to check the sign of all zeros. */
6396 if (TREE_CODE (addend) != REAL_CST)
6397 return false;
6398
6399 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6400 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6401 negate = !negate;
6402
6403 /* The mode has signed zeros, and we have to honor their sign.
6404 In this situation, there is only one case we can return true for.
6405 X - 0 is the same as X unless rounding towards -infinity is
6406 supported. */
6407 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6408 }
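/* For illustration (editorial example): when signed zeros are honored,
   X - 0.0 can still fold to X unless sign-dependent rounding is in
   effect, since only rounding towards -infinity makes 0.0 - 0.0 yield
   -0.0; X + 0.0 never folds there, because (-0.0) + 0.0 is +0.0.  */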
6409
6410 /* Subroutine of fold() that optimizes comparisons of a division by
6411 a nonzero integer constant against an integer constant, i.e.
6412 X/C1 op C2.
6413
6414 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6415 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6416 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6417
6418 The function returns the constant folded tree if a simplification
6419 can be made, and NULL_TREE otherwise. */
6420
6421 static tree
6422 fold_div_compare (location_t loc,
6423 enum tree_code code, tree type, tree arg0, tree arg1)
6424 {
6425 tree prod, tmp, hi, lo;
6426 tree arg00 = TREE_OPERAND (arg0, 0);
6427 tree arg01 = TREE_OPERAND (arg0, 1);
6428 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6429 bool neg_overflow = false;
6430 bool overflow;
6431
6432 /* We have to do this the hard way to detect unsigned overflow.
6433 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6434 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6435 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6436 neg_overflow = false;
6437
6438 if (sign == UNSIGNED)
6439 {
6440 tmp = int_const_binop (MINUS_EXPR, arg01,
6441 build_int_cst (TREE_TYPE (arg01), 1));
6442 lo = prod;
6443
6444 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6445 val = wi::add (prod, tmp, sign, &overflow);
6446 hi = force_fit_type (TREE_TYPE (arg00), val,
6447 -1, overflow | TREE_OVERFLOW (prod));
6448 }
6449 else if (tree_int_cst_sgn (arg01) >= 0)
6450 {
6451 tmp = int_const_binop (MINUS_EXPR, arg01,
6452 build_int_cst (TREE_TYPE (arg01), 1));
6453 switch (tree_int_cst_sgn (arg1))
6454 {
6455 case -1:
6456 neg_overflow = true;
6457 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6458 hi = prod;
6459 break;
6460
6461 case 0:
6462 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6463 hi = tmp;
6464 break;
6465
6466 case 1:
6467 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6468 lo = prod;
6469 break;
6470
6471 default:
6472 gcc_unreachable ();
6473 }
6474 }
6475 else
6476 {
6477 /* A negative divisor reverses the relational operators. */
6478 code = swap_tree_comparison (code);
6479
6480 tmp = int_const_binop (PLUS_EXPR, arg01,
6481 build_int_cst (TREE_TYPE (arg01), 1));
6482 switch (tree_int_cst_sgn (arg1))
6483 {
6484 case -1:
6485 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6486 lo = prod;
6487 break;
6488
6489 case 0:
6490 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6491 lo = tmp;
6492 break;
6493
6494 case 1:
6495 neg_overflow = true;
6496 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6497 hi = prod;
6498 break;
6499
6500 default:
6501 gcc_unreachable ();
6502 }
6503 }
6504
6505 switch (code)
6506 {
6507 case EQ_EXPR:
6508 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6509 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6510 if (TREE_OVERFLOW (hi))
6511 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6512 if (TREE_OVERFLOW (lo))
6513 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6514 return build_range_check (loc, type, arg00, 1, lo, hi);
6515
6516 case NE_EXPR:
6517 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6518 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6519 if (TREE_OVERFLOW (hi))
6520 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6521 if (TREE_OVERFLOW (lo))
6522 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6523 return build_range_check (loc, type, arg00, 0, lo, hi);
6524
6525 case LT_EXPR:
6526 if (TREE_OVERFLOW (lo))
6527 {
6528 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6529 return omit_one_operand_loc (loc, type, tmp, arg00);
6530 }
6531 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6532
6533 case LE_EXPR:
6534 if (TREE_OVERFLOW (hi))
6535 {
6536 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6537 return omit_one_operand_loc (loc, type, tmp, arg00);
6538 }
6539 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6540
6541 case GT_EXPR:
6542 if (TREE_OVERFLOW (hi))
6543 {
6544 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6545 return omit_one_operand_loc (loc, type, tmp, arg00);
6546 }
6547 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6548
6549 case GE_EXPR:
6550 if (TREE_OVERFLOW (lo))
6551 {
6552 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6553 return omit_one_operand_loc (loc, type, tmp, arg00);
6554 }
6555 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6556
6557 default:
6558 break;
6559 }
6560
6561 return NULL_TREE;
6562 }
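/* For illustration (editorial example): for signed X, X / 3 == 2 holds
   exactly for X in [6, 8] under truncating division, so it folds to the
   range check 6 <= X && X <= 8; likewise X / 3 > 2 becomes X > 8.  */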
6563
6564
6565 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6566 equality/inequality test, then return a simplified form of the test
6567 using a sign test. Otherwise return NULL. TYPE is the desired
6568 result type. */
6569
6570 static tree
6571 fold_single_bit_test_into_sign_test (location_t loc,
6572 enum tree_code code, tree arg0, tree arg1,
6573 tree result_type)
6574 {
6575 /* If this is testing a single bit, we can optimize the test. */
6576 if ((code == NE_EXPR || code == EQ_EXPR)
6577 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6578 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6579 {
6580 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6581 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6582 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6583
6584 if (arg00 != NULL_TREE
6585 /* This is only a win if casting to a signed type is cheap,
6586 i.e. when arg00's type is not a partial mode. */
6587 && TYPE_PRECISION (TREE_TYPE (arg00))
6588 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6589 {
6590 tree stype = signed_type_for (TREE_TYPE (arg00));
6591 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6592 result_type,
6593 fold_convert_loc (loc, stype, arg00),
6594 build_int_cst (stype, 0));
6595 }
6596 }
6597
6598 return NULL_TREE;
6599 }
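/* For illustration (editorial example): for a 32-bit int A, the mask in
   (A & 0x80000000) != 0 isolates the sign bit, so the test folds to
   A < 0, and (A & 0x80000000) == 0 folds to A >= 0 (with A first
   converted to the corresponding signed type if it was unsigned).  */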
6600
6601 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6602 equality/inequality test, then return a simplified form of
6603 the test using shifts and logical operations. Otherwise return
6604 NULL. TYPE is the desired result type. */
6605
6606 tree
6607 fold_single_bit_test (location_t loc, enum tree_code code,
6608 tree arg0, tree arg1, tree result_type)
6609 {
6610 /* If this is testing a single bit, we can optimize the test. */
6611 if ((code == NE_EXPR || code == EQ_EXPR)
6612 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6613 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6614 {
6615 tree inner = TREE_OPERAND (arg0, 0);
6616 tree type = TREE_TYPE (arg0);
6617 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6618 machine_mode operand_mode = TYPE_MODE (type);
6619 int ops_unsigned;
6620 tree signed_type, unsigned_type, intermediate_type;
6621 tree tem, one;
6622
6623 /* First, see if we can fold the single bit test into a sign-bit
6624 test. */
6625 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6626 result_type);
6627 if (tem)
6628 return tem;
6629
6630 /* Otherwise we have (A & C) != 0 where C is a single bit,
6631 convert that into ((A >> C2) & 1), where C2 = log2(C).
6632 Similarly for (A & C) == 0. */
6633
6634 /* If INNER is a right shift of a constant and it plus BITNUM does
6635 not overflow, adjust BITNUM and INNER. */
6636 if (TREE_CODE (inner) == RSHIFT_EXPR
6637 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6638 && bitnum < TYPE_PRECISION (type)
6639 && wi::ltu_p (TREE_OPERAND (inner, 1),
6640 TYPE_PRECISION (type) - bitnum))
6641 {
6642 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6643 inner = TREE_OPERAND (inner, 0);
6644 }
6645
6646 /* If we are going to be able to omit the AND below, we must do our
6647 operations as unsigned. If we must use the AND, we have a choice.
6648 Normally unsigned is faster, but for some machines signed is. */
6649 #ifdef LOAD_EXTEND_OP
6650 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6651 && !flag_syntax_only) ? 0 : 1;
6652 #else
6653 ops_unsigned = 1;
6654 #endif
6655
6656 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6657 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6658 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6659 inner = fold_convert_loc (loc, intermediate_type, inner);
6660
6661 if (bitnum != 0)
6662 inner = build2 (RSHIFT_EXPR, intermediate_type,
6663 inner, size_int (bitnum));
6664
6665 one = build_int_cst (intermediate_type, 1);
6666
6667 if (code == EQ_EXPR)
6668 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6669
6670 /* Put the AND last so it can combine with more things. */
6671 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6672
6673 /* Make sure to return the proper type. */
6674 inner = fold_convert_loc (loc, result_type, inner);
6675
6676 return inner;
6677 }
6678 return NULL_TREE;
6679 }
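/* For illustration (editorial example, assuming an unsigned intermediate
   type): for (A & 8) != 0 the bit number is 3, so the code above builds
   ((unsigned) A >> 3) & 1; for (A & 8) == 0 the low bit is first
   inverted with an XOR, giving (((unsigned) A >> 3) ^ 1) & 1.  */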
6680
6681 /* Check whether we are allowed to reorder operands arg0 and arg1,
6682 such that the evaluation of arg1 occurs before arg0. */
6683
6684 static bool
6685 reorder_operands_p (const_tree arg0, const_tree arg1)
6686 {
6687 if (! flag_evaluation_order)
6688 return true;
6689 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6690 return true;
6691 return ! TREE_SIDE_EFFECTS (arg0)
6692 && ! TREE_SIDE_EFFECTS (arg1);
6693 }
6694
6695 /* Test whether it is preferable to swap two operands, ARG0 and
6696 ARG1, for example because ARG0 is an integer constant and ARG1
6697 isn't. If REORDER is true, only recommend swapping if we can
6698 evaluate the operands in reverse order. */
6699
6700 bool
6701 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6702 {
6703 if (CONSTANT_CLASS_P (arg1))
6704 return 0;
6705 if (CONSTANT_CLASS_P (arg0))
6706 return 1;
6707
6708 STRIP_NOPS (arg0);
6709 STRIP_NOPS (arg1);
6710
6711 if (TREE_CONSTANT (arg1))
6712 return 0;
6713 if (TREE_CONSTANT (arg0))
6714 return 1;
6715
6716 if (reorder && flag_evaluation_order
6717 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6718 return 0;
6719
6720 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6721 for commutative and comparison operators. Ensuring a canonical
6722 form allows the optimizers to find additional redundancies without
6723 having to explicitly check for both orderings. */
6724 if (TREE_CODE (arg0) == SSA_NAME
6725 && TREE_CODE (arg1) == SSA_NAME
6726 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6727 return 1;
6728
6729 /* Put SSA_NAMEs last. */
6730 if (TREE_CODE (arg1) == SSA_NAME)
6731 return 0;
6732 if (TREE_CODE (arg0) == SSA_NAME)
6733 return 1;
6734
6735 /* Put variables last. */
6736 if (DECL_P (arg1))
6737 return 0;
6738 if (DECL_P (arg0))
6739 return 1;
6740
6741 return 0;
6742 }
6743
6744 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6745 ARG0 is extended to a wider type. */
6746
6747 static tree
6748 fold_widened_comparison (location_t loc, enum tree_code code,
6749 tree type, tree arg0, tree arg1)
6750 {
6751 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6752 tree arg1_unw;
6753 tree shorter_type, outer_type;
6754 tree min, max;
6755 bool above, below;
6756
6757 if (arg0_unw == arg0)
6758 return NULL_TREE;
6759 shorter_type = TREE_TYPE (arg0_unw);
6760
6761 /* Disable this optimization if we're casting a function pointer
6762 type on targets that require function pointer canonicalization. */
6763 if (targetm.have_canonicalize_funcptr_for_compare ()
6764 && TREE_CODE (shorter_type) == POINTER_TYPE
6765 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6766 return NULL_TREE;
6767
6768 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6769 return NULL_TREE;
6770
6771 arg1_unw = get_unwidened (arg1, NULL_TREE);
6772
6773 /* If possible, express the comparison in the shorter mode. */
6774 if ((code == EQ_EXPR || code == NE_EXPR
6775 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6776 && (TREE_TYPE (arg1_unw) == shorter_type
6777 || ((TYPE_PRECISION (shorter_type)
6778 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6779 && (TYPE_UNSIGNED (shorter_type)
6780 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6781 || (TREE_CODE (arg1_unw) == INTEGER_CST
6782 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6783 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6784 && int_fits_type_p (arg1_unw, shorter_type))))
6785 return fold_build2_loc (loc, code, type, arg0_unw,
6786 fold_convert_loc (loc, shorter_type, arg1_unw));
6787
6788 if (TREE_CODE (arg1_unw) != INTEGER_CST
6789 || TREE_CODE (shorter_type) != INTEGER_TYPE
6790 || !int_fits_type_p (arg1_unw, shorter_type))
6791 return NULL_TREE;
6792
6793 /* If we are comparing with an integer that does not fit into the range
6794 of the shorter type, the result is known. */
6795 outer_type = TREE_TYPE (arg1_unw);
6796 min = lower_bound_in_type (outer_type, shorter_type);
6797 max = upper_bound_in_type (outer_type, shorter_type);
6798
6799 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6800 max, arg1_unw));
6801 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6802 arg1_unw, min));
6803
6804 switch (code)
6805 {
6806 case EQ_EXPR:
6807 if (above || below)
6808 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6809 break;
6810
6811 case NE_EXPR:
6812 if (above || below)
6813 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6814 break;
6815
6816 case LT_EXPR:
6817 case LE_EXPR:
6818 if (above)
6819 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6820 else if (below)
6821 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
break;
6822 
6823 case GT_EXPR:
6824 case GE_EXPR:
6825 if (above)
6826 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6827 else if (below)
6828 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
break;
6829
6830 default:
6831 break;
6832 }
6833
6834 return NULL_TREE;
6835 }
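/* For illustration (editorial example): if UC has type unsigned char,
   (int) UC < 300 compares against a constant above the maximum of the
   shorter type (255), so ABOVE is true and the LT_EXPR case folds the
   whole comparison to constant 1.  */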
6836
6837 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6838 ARG0 just the signedness is changed. */
6839
6840 static tree
6841 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6842 tree arg0, tree arg1)
6843 {
6844 tree arg0_inner;
6845 tree inner_type, outer_type;
6846
6847 if (!CONVERT_EXPR_P (arg0))
6848 return NULL_TREE;
6849
6850 outer_type = TREE_TYPE (arg0);
6851 arg0_inner = TREE_OPERAND (arg0, 0);
6852 inner_type = TREE_TYPE (arg0_inner);
6853
6854 /* Disable this optimization if we're casting a function pointer
6855 type on targets that require function pointer canonicalization. */
6856 if (targetm.have_canonicalize_funcptr_for_compare ()
6857 && TREE_CODE (inner_type) == POINTER_TYPE
6858 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6859 return NULL_TREE;
6860
6861 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6862 return NULL_TREE;
6863
6864 if (TREE_CODE (arg1) != INTEGER_CST
6865 && !(CONVERT_EXPR_P (arg1)
6866 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6867 return NULL_TREE;
6868
6869 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6870 && code != NE_EXPR
6871 && code != EQ_EXPR)
6872 return NULL_TREE;
6873
6874 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6875 return NULL_TREE;
6876
6877 if (TREE_CODE (arg1) == INTEGER_CST)
6878 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6879 TREE_OVERFLOW (arg1));
6880 else
6881 arg1 = fold_convert_loc (loc, inner_type, arg1);
6882
6883 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6884 }
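/* For illustration (editorial example): if X is a signed int, the
   comparison (unsigned int) X == 5U only changes the signedness of X,
   so it folds to X == 5 with the constant refit into the inner signed
   type.  */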
6885
6886
6887 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6888 means A >= Y && A != MAX, but in this case we know that
6889 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6890
6891 static tree
6892 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6893 {
6894 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6895
6896 if (TREE_CODE (bound) == LT_EXPR)
6897 a = TREE_OPERAND (bound, 0);
6898 else if (TREE_CODE (bound) == GT_EXPR)
6899 a = TREE_OPERAND (bound, 1);
6900 else
6901 return NULL_TREE;
6902
6903 typea = TREE_TYPE (a);
6904 if (!INTEGRAL_TYPE_P (typea)
6905 && !POINTER_TYPE_P (typea))
6906 return NULL_TREE;
6907
6908 if (TREE_CODE (ineq) == LT_EXPR)
6909 {
6910 a1 = TREE_OPERAND (ineq, 1);
6911 y = TREE_OPERAND (ineq, 0);
6912 }
6913 else if (TREE_CODE (ineq) == GT_EXPR)
6914 {
6915 a1 = TREE_OPERAND (ineq, 0);
6916 y = TREE_OPERAND (ineq, 1);
6917 }
6918 else
6919 return NULL_TREE;
6920
6921 if (TREE_TYPE (a1) != typea)
6922 return NULL_TREE;
6923
6924 if (POINTER_TYPE_P (typea))
6925 {
6926 /* Convert the pointer types into integers before taking the difference. */
6927 tree ta = fold_convert_loc (loc, ssizetype, a);
6928 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6929 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6930 }
6931 else
6932 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6933
6934 if (!diff || !integer_onep (diff))
6935 return NULL_TREE;
6936
6937 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6938 }
6939
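/* Example (illustrative): given BOUND a < x and INEQ a + 1 > y, all
of the same integral type, the difference of a + 1 and a folds to 1,
so the inequality is rewritten to the non-sharp form a >= y. */
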
6940 /* Fold a sum or difference of at least one multiplication.
6941 Returns the folded tree or NULL if no simplification could be made. */
6942
6943 static tree
6944 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6945 tree arg0, tree arg1)
6946 {
6947 tree arg00, arg01, arg10, arg11;
6948 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6949
6950 /* (A * C) +- (B * C) -> (A+-B) * C.
6951 (A * C) +- A -> A * (C+-1).
6952 We are most concerned about the case where C is a constant,
6953 but other combinations show up during loop reduction. Since
6954 it is not difficult, try all four possibilities. */
6955
6956 if (TREE_CODE (arg0) == MULT_EXPR)
6957 {
6958 arg00 = TREE_OPERAND (arg0, 0);
6959 arg01 = TREE_OPERAND (arg0, 1);
6960 }
6961 else if (TREE_CODE (arg0) == INTEGER_CST)
6962 {
6963 arg00 = build_one_cst (type);
6964 arg01 = arg0;
6965 }
6966 else
6967 {
6968 /* We cannot generate constant 1 for fract. */
6969 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6970 return NULL_TREE;
6971 arg00 = arg0;
6972 arg01 = build_one_cst (type);
6973 }
6974 if (TREE_CODE (arg1) == MULT_EXPR)
6975 {
6976 arg10 = TREE_OPERAND (arg1, 0);
6977 arg11 = TREE_OPERAND (arg1, 1);
6978 }
6979 else if (TREE_CODE (arg1) == INTEGER_CST)
6980 {
6981 arg10 = build_one_cst (type);
6982 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6983 the purpose of this canonicalization. */
6984 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6985 && negate_expr_p (arg1)
6986 && code == PLUS_EXPR)
6987 {
6988 arg11 = negate_expr (arg1);
6989 code = MINUS_EXPR;
6990 }
6991 else
6992 arg11 = arg1;
6993 }
6994 else
6995 {
6996 /* We cannot generate constant 1 for fract. */
6997 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6998 return NULL_TREE;
6999 arg10 = arg1;
7000 arg11 = build_one_cst (type);
7001 }
7002 same = NULL_TREE;
7003
7004 if (operand_equal_p (arg01, arg11, 0))
7005 same = arg01, alt0 = arg00, alt1 = arg10;
7006 else if (operand_equal_p (arg00, arg10, 0))
7007 same = arg00, alt0 = arg01, alt1 = arg11;
7008 else if (operand_equal_p (arg00, arg11, 0))
7009 same = arg00, alt0 = arg01, alt1 = arg10;
7010 else if (operand_equal_p (arg01, arg10, 0))
7011 same = arg01, alt0 = arg00, alt1 = arg11;
7012
7013 /* No identical multiplicands; see if we can find a common
7014 power-of-two factor in non-power-of-two multiplies. This
7015 can help in multi-dimensional array access. */
7016 else if (tree_fits_shwi_p (arg01)
7017 && tree_fits_shwi_p (arg11))
7018 {
7019 HOST_WIDE_INT int01, int11, tmp;
7020 bool swap = false;
7021 tree maybe_same;
7022 int01 = tree_to_shwi (arg01);
7023 int11 = tree_to_shwi (arg11);
7024
7025 /* Move min of absolute values to int11. */
7026 if (absu_hwi (int01) < absu_hwi (int11))
7027 {
7028 tmp = int01, int01 = int11, int11 = tmp;
7029 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7030 maybe_same = arg01;
7031 swap = true;
7032 }
7033 else
7034 maybe_same = arg11;
7035
7036 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7037 /* The remainder should not be a constant, otherwise we
7038 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7039 increase the number of multiplications required. */
7040 && TREE_CODE (arg10) != INTEGER_CST)
7041 {
7042 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7043 build_int_cst (TREE_TYPE (arg00),
7044 int01 / int11));
7045 alt1 = arg10;
7046 same = maybe_same;
7047 if (swap)
7048 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7049 }
7050 }
7051
7052 if (same)
7053 return fold_build2_loc (loc, MULT_EXPR, type,
7054 fold_build2_loc (loc, code, type,
7055 fold_convert_loc (loc, type, alt0),
7056 fold_convert_loc (loc, type, alt1)),
7057 fold_convert_loc (loc, type, same));
7058
7059 return NULL_TREE;
7060 }
7061
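/* Examples (illustrative): a * 4 + b * 4 becomes (a + b) * 4 via the
identical-multiplicand case, and i * 12 + j * 4 becomes
(i * 3 + j) * 4 via the common power-of-two factor case, since 4
divides 12 exactly. */
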
7062 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7063 specified by EXPR into the buffer PTR of length LEN bytes.
7064 Return the number of bytes placed in the buffer, or zero
7065 upon failure. */
7066
7067 static int
7068 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7069 {
7070 tree type = TREE_TYPE (expr);
7071 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7072 int byte, offset, word, words;
7073 unsigned char value;
7074
7075 if ((off == -1 && total_bytes > len)
7076 || off >= total_bytes)
7077 return 0;
7078 if (off == -1)
7079 off = 0;
7080 words = total_bytes / UNITS_PER_WORD;
7081
7082 for (byte = 0; byte < total_bytes; byte++)
7083 {
7084 int bitpos = byte * BITS_PER_UNIT;
7085 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7086 number of bytes. */
7087 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7088
7089 if (total_bytes > UNITS_PER_WORD)
7090 {
7091 word = byte / UNITS_PER_WORD;
7092 if (WORDS_BIG_ENDIAN)
7093 word = (words - 1) - word;
7094 offset = word * UNITS_PER_WORD;
7095 if (BYTES_BIG_ENDIAN)
7096 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7097 else
7098 offset += byte % UNITS_PER_WORD;
7099 }
7100 else
7101 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7102 if (offset >= off
7103 && offset - off < len)
7104 ptr[offset - off] = value;
7105 }
7106 return MIN (len, total_bytes - off);
7107 }
7108
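/* Example (illustrative): on a 32-bit little-endian target with
8-bit units, the INTEGER_CST 0x01020304 of type int is encoded as
the four bytes 04 03 02 01; on a big-endian target as 01 02 03 04. */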
7109
7110 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7111 specified by EXPR into the buffer PTR of length LEN bytes.
7112 Return the number of bytes placed in the buffer, or zero
7113 upon failure. */
7114
7115 static int
7116 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7117 {
7118 tree type = TREE_TYPE (expr);
7119 machine_mode mode = TYPE_MODE (type);
7120 int total_bytes = GET_MODE_SIZE (mode);
7121 FIXED_VALUE_TYPE value;
7122 tree i_value, i_type;
7123
7124 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7125 return 0;
7126
7127 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7128
7129 if (NULL_TREE == i_type
7130 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7131 return 0;
7132
7133 value = TREE_FIXED_CST (expr);
7134 i_value = double_int_to_tree (i_type, value.data);
7135
7136 return native_encode_int (i_value, ptr, len, off);
7137 }
7138
7139
7140 /* Subroutine of native_encode_expr. Encode the REAL_CST
7141 specified by EXPR into the buffer PTR of length LEN bytes.
7142 Return the number of bytes placed in the buffer, or zero
7143 upon failure. */
7144
7145 static int
7146 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7147 {
7148 tree type = TREE_TYPE (expr);
7149 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7150 int byte, offset, word, words, bitpos;
7151 unsigned char value;
7152
7153 /* There are always 32 bits in each long, no matter the size of
7154 the host's long. We handle floating point representations with
7155 up to 192 bits. */
7156 long tmp[6];
7157
7158 if ((off == -1 && total_bytes > len)
7159 || off >= total_bytes)
7160 return 0;
7161 if (off == -1)
7162 off = 0;
7163 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7164
7165 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7166
7167 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7168 bitpos += BITS_PER_UNIT)
7169 {
7170 byte = (bitpos / BITS_PER_UNIT) & 3;
7171 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7172
7173 if (UNITS_PER_WORD < 4)
7174 {
7175 word = byte / UNITS_PER_WORD;
7176 if (WORDS_BIG_ENDIAN)
7177 word = (words - 1) - word;
7178 offset = word * UNITS_PER_WORD;
7179 if (BYTES_BIG_ENDIAN)
7180 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7181 else
7182 offset += byte % UNITS_PER_WORD;
7183 }
7184 else
7185 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7186 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7187 if (offset >= off
7188 && offset - off < len)
7189 ptr[offset - off] = value;
7190 }
7191 return MIN (len, total_bytes - off);
7192 }
7193
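/* Example (illustrative, assuming IEEE single format): 1.0f is the
32-bit image 0x3f800000; real_to_target places it in tmp[0] and the
loop then stores it as the bytes 00 00 80 3f on a little-endian
target and as 3f 80 00 00 on a big-endian one. */
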
7194 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7195 specified by EXPR into the buffer PTR of length LEN bytes.
7196 Return the number of bytes placed in the buffer, or zero
7197 upon failure. */
7198
7199 static int
7200 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7201 {
7202 int rsize, isize;
7203 tree part;
7204
7205 part = TREE_REALPART (expr);
7206 rsize = native_encode_expr (part, ptr, len, off);
7207 if (off == -1
7208 && rsize == 0)
7209 return 0;
7210 part = TREE_IMAGPART (expr);
7211 if (off != -1)
7212 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7213 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7214 if (off == -1
7215 && isize != rsize)
7216 return 0;
7217 return rsize + isize;
7218 }
7219
7220
7221 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7222 specified by EXPR into the buffer PTR of length LEN bytes.
7223 Return the number of bytes placed in the buffer, or zero
7224 upon failure. */
7225
7226 static int
7227 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7228 {
7229 unsigned i, count;
7230 int size, offset;
7231 tree itype, elem;
7232
7233 offset = 0;
7234 count = VECTOR_CST_NELTS (expr);
7235 itype = TREE_TYPE (TREE_TYPE (expr));
7236 size = GET_MODE_SIZE (TYPE_MODE (itype));
7237 for (i = 0; i < count; i++)
7238 {
7239 if (off >= size)
7240 {
7241 off -= size;
7242 continue;
7243 }
7244 elem = VECTOR_CST_ELT (expr, i);
7245 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7246 if ((off == -1 && res != size)
7247 || res == 0)
7248 return 0;
7249 offset += res;
7250 if (offset >= len)
7251 return offset;
7252 if (off != -1)
7253 off = 0;
7254 }
7255 return offset;
7256 }
7257
7258
7259 /* Subroutine of native_encode_expr. Encode the STRING_CST
7260 specified by EXPR into the buffer PTR of length LEN bytes.
7261 Return the number of bytes placed in the buffer, or zero
7262 upon failure. */
7263
7264 static int
7265 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7266 {
7267 tree type = TREE_TYPE (expr);
7268 HOST_WIDE_INT total_bytes;
7269
7270 if (TREE_CODE (type) != ARRAY_TYPE
7271 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7272 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7273 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7274 return 0;
7275 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7276 if ((off == -1 && total_bytes > len)
7277 || off >= total_bytes)
7278 return 0;
7279 if (off == -1)
7280 off = 0;
7281 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7282 {
7283 int written = 0;
7284 if (off < TREE_STRING_LENGTH (expr))
7285 {
7286 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7287 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7288 }
7289 memset (ptr + written, 0,
7290 MIN (total_bytes - written, len - written));
7291 }
7292 else
7293 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7294 return MIN (total_bytes - off, len);
7295 }
7296
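/* Example (illustrative): a STRING_CST "ab" of type char[4] with
TREE_STRING_LENGTH 3 ("ab" plus the terminating nul) encodes into a
4-byte buffer as 'a' 'b' 0 0, with the tail zero-filled. */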
7297
7298 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7299 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7300 buffer PTR of length LEN bytes. If OFF is not -1 then start
7301 the encoding at byte offset OFF and encode at most LEN bytes.
7302 Return the number of bytes placed in the buffer, or zero upon failure. */
7303
7304 int
7305 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7306 {
7307 switch (TREE_CODE (expr))
7308 {
7309 case INTEGER_CST:
7310 return native_encode_int (expr, ptr, len, off);
7311
7312 case REAL_CST:
7313 return native_encode_real (expr, ptr, len, off);
7314
7315 case FIXED_CST:
7316 return native_encode_fixed (expr, ptr, len, off);
7317
7318 case COMPLEX_CST:
7319 return native_encode_complex (expr, ptr, len, off);
7320
7321 case VECTOR_CST:
7322 return native_encode_vector (expr, ptr, len, off);
7323
7324 case STRING_CST:
7325 return native_encode_string (expr, ptr, len, off);
7326
7327 default:
7328 return 0;
7329 }
7330 }
7331
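/* A minimal usage sketch (illustrative; see fold_view_convert_expr
below for an in-tree caller):

unsigned char buf[64];
tree cst = NULL_TREE;
int len = native_encode_expr (expr, buf, sizeof (buf), -1);
if (len != 0)
cst = native_interpret_expr (type, buf, len);

A zero return means the constant could not be encoded. */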
7332
7333 /* Subroutine of native_interpret_expr. Interpret the contents of
7334 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7335 If the buffer cannot be interpreted, return NULL_TREE. */
7336
7337 static tree
7338 native_interpret_int (tree type, const unsigned char *ptr, int len)
7339 {
7340 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7341
7342 if (total_bytes > len
7343 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7344 return NULL_TREE;
7345
7346 wide_int result = wi::from_buffer (ptr, total_bytes);
7347
7348 return wide_int_to_tree (type, result);
7349 }
7350
7351
7352 /* Subroutine of native_interpret_expr. Interpret the contents of
7353 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7354 If the buffer cannot be interpreted, return NULL_TREE. */
7355
7356 static tree
7357 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7358 {
7359 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7360 double_int result;
7361 FIXED_VALUE_TYPE fixed_value;
7362
7363 if (total_bytes > len
7364 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7365 return NULL_TREE;
7366
7367 result = double_int::from_buffer (ptr, total_bytes);
7368 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7369
7370 return build_fixed (type, fixed_value);
7371 }
7372
7373
7374 /* Subroutine of native_interpret_expr. Interpret the contents of
7375 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7376 If the buffer cannot be interpreted, return NULL_TREE. */
7377
7378 static tree
7379 native_interpret_real (tree type, const unsigned char *ptr, int len)
7380 {
7381 machine_mode mode = TYPE_MODE (type);
7382 int total_bytes = GET_MODE_SIZE (mode);
7383 int byte, offset, word, words, bitpos;
7384 unsigned char value;
7385 /* There are always 32 bits in each long, no matter the size of
7386 the host's long. We handle floating point representations with
7387 up to 192 bits. */
7388 REAL_VALUE_TYPE r;
7389 long tmp[6];
7390
7392 if (total_bytes > len || total_bytes > 24)
7393 return NULL_TREE;
7394 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7395
7396 memset (tmp, 0, sizeof (tmp));
7397 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7398 bitpos += BITS_PER_UNIT)
7399 {
7400 byte = (bitpos / BITS_PER_UNIT) & 3;
7401 if (UNITS_PER_WORD < 4)
7402 {
7403 word = byte / UNITS_PER_WORD;
7404 if (WORDS_BIG_ENDIAN)
7405 word = (words - 1) - word;
7406 offset = word * UNITS_PER_WORD;
7407 if (BYTES_BIG_ENDIAN)
7408 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7409 else
7410 offset += byte % UNITS_PER_WORD;
7411 }
7412 else
7413 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7414 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7415
7416 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7417 }
7418
7419 real_from_target (&r, tmp, mode);
7420 return build_real (type, r);
7421 }
7422
7423
7424 /* Subroutine of native_interpret_expr. Interpret the contents of
7425 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7426 If the buffer cannot be interpreted, return NULL_TREE. */
7427
7428 static tree
7429 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7430 {
7431 tree etype, rpart, ipart;
7432 int size;
7433
7434 etype = TREE_TYPE (type);
7435 size = GET_MODE_SIZE (TYPE_MODE (etype));
7436 if (size * 2 > len)
7437 return NULL_TREE;
7438 rpart = native_interpret_expr (etype, ptr, size);
7439 if (!rpart)
7440 return NULL_TREE;
7441 ipart = native_interpret_expr (etype, ptr+size, size);
7442 if (!ipart)
7443 return NULL_TREE;
7444 return build_complex (type, rpart, ipart);
7445 }
7446
7447
7448 /* Subroutine of native_interpret_expr. Interpret the contents of
7449 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7450 If the buffer cannot be interpreted, return NULL_TREE. */
7451
7452 static tree
7453 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7454 {
7455 tree etype, elem;
7456 int i, size, count;
7457 tree *elements;
7458
7459 etype = TREE_TYPE (type);
7460 size = GET_MODE_SIZE (TYPE_MODE (etype));
7461 count = TYPE_VECTOR_SUBPARTS (type);
7462 if (size * count > len)
7463 return NULL_TREE;
7464
7465 elements = XALLOCAVEC (tree, count);
7466 for (i = count - 1; i >= 0; i--)
7467 {
7468 elem = native_interpret_expr (etype, ptr+(i*size), size);
7469 if (!elem)
7470 return NULL_TREE;
7471 elements[i] = elem;
7472 }
7473 return build_vector (type, elements);
7474 }
7475
7476
7477 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7478 the buffer PTR of length LEN as a constant of type TYPE. For
7479 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7480 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7481 return NULL_TREE. */
7482
7483 tree
7484 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7485 {
7486 switch (TREE_CODE (type))
7487 {
7488 case INTEGER_TYPE:
7489 case ENUMERAL_TYPE:
7490 case BOOLEAN_TYPE:
7491 case POINTER_TYPE:
7492 case REFERENCE_TYPE:
7493 return native_interpret_int (type, ptr, len);
7494
7495 case REAL_TYPE:
7496 return native_interpret_real (type, ptr, len);
7497
7498 case FIXED_POINT_TYPE:
7499 return native_interpret_fixed (type, ptr, len);
7500
7501 case COMPLEX_TYPE:
7502 return native_interpret_complex (type, ptr, len);
7503
7504 case VECTOR_TYPE:
7505 return native_interpret_vector (type, ptr, len);
7506
7507 default:
7508 return NULL_TREE;
7509 }
7510 }
7511
7512 /* Returns true if we can interpret the contents of a native encoding
7513 as TYPE. */
7514
7515 static bool
7516 can_native_interpret_type_p (tree type)
7517 {
7518 switch (TREE_CODE (type))
7519 {
7520 case INTEGER_TYPE:
7521 case ENUMERAL_TYPE:
7522 case BOOLEAN_TYPE:
7523 case POINTER_TYPE:
7524 case REFERENCE_TYPE:
7525 case FIXED_POINT_TYPE:
7526 case REAL_TYPE:
7527 case COMPLEX_TYPE:
7528 case VECTOR_TYPE:
7529 return true;
7530 default:
7531 return false;
7532 }
7533 }
7534
7535 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7536 TYPE at compile-time. If we're unable to perform the conversion
7537 return NULL_TREE. */
7538
7539 static tree
7540 fold_view_convert_expr (tree type, tree expr)
7541 {
7542 /* We support up to 512-bit values (for V8DFmode). */
7543 unsigned char buffer[64];
7544 int len;
7545
7546 /* Check that the host and target are sane. */
7547 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7548 return NULL_TREE;
7549
7550 len = native_encode_expr (expr, buffer, sizeof (buffer));
7551 if (len == 0)
7552 return NULL_TREE;
7553
7554 return native_interpret_expr (type, buffer, len);
7555 }
7556
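/* Example (illustrative): VIEW_CONVERT_EXPR<int>(1.0f) encodes the
REAL_CST into the buffer and reinterprets the bytes as an int,
yielding the INTEGER_CST 0x3f800000 (1065353216) on targets using
IEEE single format. */
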
7557 /* Build an expression for the address of T. Folds away INDIRECT_REF
7558 to avoid confusing the gimplify process. */
7559
7560 tree
7561 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7562 {
7563 /* The size of the object is not relevant when talking about its address. */
7564 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7565 t = TREE_OPERAND (t, 0);
7566
7567 if (TREE_CODE (t) == INDIRECT_REF)
7568 {
7569 t = TREE_OPERAND (t, 0);
7570
7571 if (TREE_TYPE (t) != ptrtype)
7572 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7573 }
7574 else if (TREE_CODE (t) == MEM_REF
7575 && integer_zerop (TREE_OPERAND (t, 1)))
7576 return TREE_OPERAND (t, 0);
7577 else if (TREE_CODE (t) == MEM_REF
7578 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7579 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7580 TREE_OPERAND (t, 0),
7581 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7582 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7583 {
7584 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7585
7586 if (TREE_TYPE (t) != ptrtype)
7587 t = fold_convert_loc (loc, ptrtype, t);
7588 }
7589 else
7590 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7591
7592 return t;
7593 }
7594
7595 /* Build an expression for the address of T. */
7596
7597 tree
7598 build_fold_addr_expr_loc (location_t loc, tree t)
7599 {
7600 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7601
7602 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7603 }
7604
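/* Examples (illustrative): taking the address of *p folds back to p
(with a cast if the pointer types differ), and the address of
MEM[p, 0] is just p; only in the remaining cases is a fresh
ADDR_EXPR built. */
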
7605 /* Fold a unary expression of code CODE and type TYPE with operand
7606 OP0. Return the folded expression if folding is successful.
7607 Otherwise, return NULL_TREE. */
7608
7609 tree
7610 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7611 {
7612 tree tem;
7613 tree arg0;
7614 enum tree_code_class kind = TREE_CODE_CLASS (code);
7615
7616 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7617 && TREE_CODE_LENGTH (code) == 1);
7618
7619 arg0 = op0;
7620 if (arg0)
7621 {
7622 if (CONVERT_EXPR_CODE_P (code)
7623 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7624 {
7625 /* Don't use STRIP_NOPS, because signedness of argument type
7626 matters. */
7627 STRIP_SIGN_NOPS (arg0);
7628 }
7629 else
7630 {
7631 /* Strip any conversions that don't change the mode. This
7632 is safe for every expression, except for a comparison
7633 expression because its signedness is derived from its
7634 operands.
7635
7636 Note that this is done as an internal manipulation within
7637 the constant folder, in order to find the simplest
7638 representation of the arguments so that their form can be
7639 studied. In any case, the appropriate type conversions
7640 should be put back in the tree that comes out of the
7641 constant folder. */
7642 STRIP_NOPS (arg0);
7643 }
7644
7645 if (CONSTANT_CLASS_P (arg0))
7646 {
7647 tree tem = const_unop (code, type, arg0);
7648 if (tem)
7649 {
7650 if (TREE_TYPE (tem) != type)
7651 tem = fold_convert_loc (loc, type, tem);
7652 return tem;
7653 }
7654 }
7655 }
7656
7657 tem = generic_simplify (loc, code, type, op0);
7658 if (tem)
7659 return tem;
7660
7661 if (TREE_CODE_CLASS (code) == tcc_unary)
7662 {
7663 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7664 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7665 fold_build1_loc (loc, code, type,
7666 fold_convert_loc (loc, TREE_TYPE (op0),
7667 TREE_OPERAND (arg0, 1))));
7668 else if (TREE_CODE (arg0) == COND_EXPR)
7669 {
7670 tree arg01 = TREE_OPERAND (arg0, 1);
7671 tree arg02 = TREE_OPERAND (arg0, 2);
7672 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7673 arg01 = fold_build1_loc (loc, code, type,
7674 fold_convert_loc (loc,
7675 TREE_TYPE (op0), arg01));
7676 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7677 arg02 = fold_build1_loc (loc, code, type,
7678 fold_convert_loc (loc,
7679 TREE_TYPE (op0), arg02));
7680 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7681 arg01, arg02);
7682
7683 /* If this was a conversion, and all we did was to move it
7684 inside the COND_EXPR, bring it back out. But leave it if
7685 it is a conversion from integer to integer and the
7686 result precision is no wider than a word since such a
7687 conversion is cheap and may be optimized away by combine,
7688 while it couldn't if it were outside the COND_EXPR. Then return
7689 so we don't get into an infinite recursion loop taking the
7690 conversion out and then back in. */
7691
7692 if ((CONVERT_EXPR_CODE_P (code)
7693 || code == NON_LVALUE_EXPR)
7694 && TREE_CODE (tem) == COND_EXPR
7695 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7696 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7697 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7698 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7699 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7700 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7701 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7702 && (INTEGRAL_TYPE_P
7703 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7704 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7705 || flag_syntax_only))
7706 tem = build1_loc (loc, code, type,
7707 build3 (COND_EXPR,
7708 TREE_TYPE (TREE_OPERAND
7709 (TREE_OPERAND (tem, 1), 0)),
7710 TREE_OPERAND (tem, 0),
7711 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7712 TREE_OPERAND (TREE_OPERAND (tem, 2),
7713 0)));
7714 return tem;
7715 }
7716 }
7717
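/* Example of the distribution above (illustrative): negating a
COND_EXPR, -(a ? b : c), becomes a ? -b : -c, with the unary
operation applied to each non-void arm. */
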
7718 switch (code)
7719 {
7720 case NON_LVALUE_EXPR:
7721 if (!maybe_lvalue_p (op0))
7722 return fold_convert_loc (loc, type, op0);
7723 return NULL_TREE;
7724
7725 CASE_CONVERT:
7726 case FLOAT_EXPR:
7727 case FIX_TRUNC_EXPR:
7728 if (COMPARISON_CLASS_P (op0))
7729 {
7730 /* If we have (type) (a CMP b) and type is an integral type, return
7731 new expression involving the new type. Canonicalize
7732 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7733 non-integral type.
7734 Do not fold the result, as that would not simplify further and
7735 folding again would lead to recursion. */
7736 if (TREE_CODE (type) == BOOLEAN_TYPE)
7737 return build2_loc (loc, TREE_CODE (op0), type,
7738 TREE_OPERAND (op0, 0),
7739 TREE_OPERAND (op0, 1));
7740 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7741 && TREE_CODE (type) != VECTOR_TYPE)
7742 return build3_loc (loc, COND_EXPR, type, op0,
7743 constant_boolean_node (true, type),
7744 constant_boolean_node (false, type));
7745 }
7746
7747 /* Handle (T *)&A.B.C for A being of type T and B and C
7748 living at offset zero. This occurs frequently in
7749 C++ upcasting and then accessing the base. */
7750 if (TREE_CODE (op0) == ADDR_EXPR
7751 && POINTER_TYPE_P (type)
7752 && handled_component_p (TREE_OPERAND (op0, 0)))
7753 {
7754 HOST_WIDE_INT bitsize, bitpos;
7755 tree offset;
7756 machine_mode mode;
7757 int unsignedp, volatilep;
7758 tree base = TREE_OPERAND (op0, 0);
7759 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7760 &mode, &unsignedp, &volatilep, false);
7761 /* If the reference was to a (constant) zero offset, we can use
7762 the address of the base if it has the same base type
7763 as the result type and the pointer type is unqualified. */
7764 if (! offset && bitpos == 0
7765 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7766 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7767 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7768 return fold_convert_loc (loc, type,
7769 build_fold_addr_expr_loc (loc, base));
7770 }
7771
7772 if (TREE_CODE (op0) == MODIFY_EXPR
7773 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7774 /* Detect assigning a bitfield. */
7775 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7776 && DECL_BIT_FIELD
7777 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7778 {
7779 /* Don't leave an assignment inside a conversion
7780 unless assigning a bitfield. */
7781 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7782 /* First do the assignment, then return converted constant. */
7783 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7784 TREE_NO_WARNING (tem) = 1;
7785 TREE_USED (tem) = 1;
7786 return tem;
7787 }
7788
7789 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7790 constant (if x has signed type, the sign bit cannot be set
7791 in c). This folds extension into the BIT_AND_EXPR.
7792 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7793 very likely don't have maximal range for their precision and this
7794 transformation effectively doesn't preserve non-maximal ranges. */
7795 if (TREE_CODE (type) == INTEGER_TYPE
7796 && TREE_CODE (op0) == BIT_AND_EXPR
7797 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7798 {
7799 tree and_expr = op0;
7800 tree and0 = TREE_OPERAND (and_expr, 0);
7801 tree and1 = TREE_OPERAND (and_expr, 1);
7802 int change = 0;
7803
7804 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7805 || (TYPE_PRECISION (type)
7806 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7807 change = 1;
7808 else if (TYPE_PRECISION (TREE_TYPE (and1))
7809 <= HOST_BITS_PER_WIDE_INT
7810 && tree_fits_uhwi_p (and1))
7811 {
7812 unsigned HOST_WIDE_INT cst;
7813
7814 cst = tree_to_uhwi (and1);
7815 cst &= HOST_WIDE_INT_M1U
7816 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7817 change = (cst == 0);
7818 #ifdef LOAD_EXTEND_OP
7819 if (change
7820 && !flag_syntax_only
7821 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7822 == ZERO_EXTEND))
7823 {
7824 tree uns = unsigned_type_for (TREE_TYPE (and0));
7825 and0 = fold_convert_loc (loc, uns, and0);
7826 and1 = fold_convert_loc (loc, uns, and1);
7827 }
7828 #endif
7829 }
7830 if (change)
7831 {
7832 tem = force_fit_type (type, wi::to_widest (and1), 0,
7833 TREE_OVERFLOW (and1));
7834 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7835 fold_convert_loc (loc, type, and0), tem);
7836 }
7837 }
7838
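/* Example of the transform above (illustrative): with unsigned char
c, (int) (c & 0x7f) becomes (int) c & 0x7f, folding the widening
conversion into the BIT_AND_EXPR. */
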
7839 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7840 when one of the new casts will fold away. Conservatively we assume
7841 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7842 if (POINTER_TYPE_P (type)
7843 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7844 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7845 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7846 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7847 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7848 {
7849 tree arg00 = TREE_OPERAND (arg0, 0);
7850 tree arg01 = TREE_OPERAND (arg0, 1);
7851
7852 return fold_build_pointer_plus_loc
7853 (loc, fold_convert_loc (loc, type, arg00), arg01);
7854 }
7855
7856 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7857 of the same precision, and X has an integer type not narrower than
7858 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7859 if (INTEGRAL_TYPE_P (type)
7860 && TREE_CODE (op0) == BIT_NOT_EXPR
7861 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7862 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7863 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7864 {
7865 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7866 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7867 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7868 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7869 fold_convert_loc (loc, type, tem));
7870 }
7871
7872 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7873 type of X and Y (integer types only). */
7874 if (INTEGRAL_TYPE_P (type)
7875 && TREE_CODE (op0) == MULT_EXPR
7876 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7877 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7878 {
7879 /* Be careful not to introduce new overflows. */
7880 tree mult_type;
7881 if (TYPE_OVERFLOW_WRAPS (type))
7882 mult_type = type;
7883 else
7884 mult_type = unsigned_type_for (type);
7885
7886 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7887 {
7888 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7889 fold_convert_loc (loc, mult_type,
7890 TREE_OPERAND (op0, 0)),
7891 fold_convert_loc (loc, mult_type,
7892 TREE_OPERAND (op0, 1)));
7893 return fold_convert_loc (loc, type, tem);
7894 }
7895 }
7896
7897 return NULL_TREE;
7898
7899 case VIEW_CONVERT_EXPR:
7900 if (TREE_CODE (op0) == MEM_REF)
7901 return fold_build2_loc (loc, MEM_REF, type,
7902 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7903
7904 return NULL_TREE;
7905
7906 case NEGATE_EXPR:
7907 tem = fold_negate_expr (loc, arg0);
7908 if (tem)
7909 return fold_convert_loc (loc, type, tem);
7910 return NULL_TREE;
7911
7912 case ABS_EXPR:
7913 /* Convert fabs((double)float) into (double)fabsf(float). */
7914 if (TREE_CODE (arg0) == NOP_EXPR
7915 && TREE_CODE (type) == REAL_TYPE)
7916 {
7917 tree targ0 = strip_float_extensions (arg0);
7918 if (targ0 != arg0)
7919 return fold_convert_loc (loc, type,
7920 fold_build1_loc (loc, ABS_EXPR,
7921 TREE_TYPE (targ0),
7922 targ0));
7923 }
7924
7925 /* Strip sign ops from argument. */
7926 if (TREE_CODE (type) == REAL_TYPE)
7927 {
7928 tem = fold_strip_sign_ops (arg0);
7929 if (tem)
7930 return fold_build1_loc (loc, ABS_EXPR, type,
7931 fold_convert_loc (loc, type, tem));
7932 }
7933 return NULL_TREE;
7934
7935 case CONJ_EXPR:
7936 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7937 return fold_convert_loc (loc, type, arg0);
7938 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7939 {
7940 tree itype = TREE_TYPE (type);
7941 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7942 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7943 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7944 negate_expr (ipart));
7945 }
7946 if (TREE_CODE (arg0) == CONJ_EXPR)
7947 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
7948 return NULL_TREE;
7949
7950 case BIT_NOT_EXPR:
7951 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7952 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7953 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7954 fold_convert_loc (loc, type,
7955 TREE_OPERAND (arg0, 0)))))
7956 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7957 fold_convert_loc (loc, type,
7958 TREE_OPERAND (arg0, 1)));
7959 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7960 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7961 fold_convert_loc (loc, type,
7962 TREE_OPERAND (arg0, 1)))))
7963 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7964 fold_convert_loc (loc, type,
7965 TREE_OPERAND (arg0, 0)), tem);
7966
7967 return NULL_TREE;
7968
7969 case TRUTH_NOT_EXPR:
7970 /* Note that the operand of this must be an int
7971 and its values must be 0 or 1.
7972 ("true" is a fixed value perhaps depending on the language,
7973 but we don't handle values other than 1 correctly yet.) */
7974 tem = fold_truth_not_expr (loc, arg0);
7975 if (!tem)
7976 return NULL_TREE;
7977 return fold_convert_loc (loc, type, tem);
7978
7979 case REALPART_EXPR:
7980 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7981 return fold_convert_loc (loc, type, arg0);
7982 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7983 {
7984 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7985 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7986 fold_build1_loc (loc, REALPART_EXPR, itype,
7987 TREE_OPERAND (arg0, 0)),
7988 fold_build1_loc (loc, REALPART_EXPR, itype,
7989 TREE_OPERAND (arg0, 1)));
7990 return fold_convert_loc (loc, type, tem);
7991 }
7992 if (TREE_CODE (arg0) == CONJ_EXPR)
7993 {
7994 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7995 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
7996 TREE_OPERAND (arg0, 0));
7997 return fold_convert_loc (loc, type, tem);
7998 }
7999 if (TREE_CODE (arg0) == CALL_EXPR)
8000 {
8001 tree fn = get_callee_fndecl (arg0);
8002 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8003 switch (DECL_FUNCTION_CODE (fn))
8004 {
8005 CASE_FLT_FN (BUILT_IN_CEXPI):
8006 fn = mathfn_built_in (type, BUILT_IN_COS);
8007 if (fn)
8008 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8009 break;
8010
8011 default:
8012 break;
8013 }
8014 }
8015 return NULL_TREE;
8016
8017 case IMAGPART_EXPR:
8018 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8019 return build_zero_cst (type);
8020 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8021 {
8022 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8023 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8024 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8025 TREE_OPERAND (arg0, 0)),
8026 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8027 TREE_OPERAND (arg0, 1)));
8028 return fold_convert_loc (loc, type, tem);
8029 }
8030 if (TREE_CODE (arg0) == CONJ_EXPR)
8031 {
8032 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8033 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8034 return fold_convert_loc (loc, type, negate_expr (tem));
8035 }
8036 if (TREE_CODE (arg0) == CALL_EXPR)
8037 {
8038 tree fn = get_callee_fndecl (arg0);
8039 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8040 switch (DECL_FUNCTION_CODE (fn))
8041 {
8042 CASE_FLT_FN (BUILT_IN_CEXPI):
8043 fn = mathfn_built_in (type, BUILT_IN_SIN);
8044 if (fn)
8045 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8046 break;
8047
8048 default:
8049 break;
8050 }
8051 }
8052 return NULL_TREE;
8053
8054 case INDIRECT_REF:
8055 /* Fold *&X to X if X is an lvalue. */
8056 if (TREE_CODE (op0) == ADDR_EXPR)
8057 {
8058 tree op00 = TREE_OPERAND (op0, 0);
8059 if ((TREE_CODE (op00) == VAR_DECL
8060 || TREE_CODE (op00) == PARM_DECL
8061 || TREE_CODE (op00) == RESULT_DECL)
8062 && !TREE_READONLY (op00))
8063 return op00;
8064 }
8065 return NULL_TREE;
8066
8067 default:
8068 return NULL_TREE;
8069 } /* switch (code) */
8070 }
8071
8072
8073 /* If the operation was a conversion do _not_ mark a resulting constant
8074 with TREE_OVERFLOW if the original constant was not. These conversions
8075 have implementation defined behavior and retaining the TREE_OVERFLOW
8076 flag here would confuse later passes such as VRP. */
8077 tree
8078 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8079 tree type, tree op0)
8080 {
8081 tree res = fold_unary_loc (loc, code, type, op0);
8082 if (res
8083 && TREE_CODE (res) == INTEGER_CST
8084 && TREE_CODE (op0) == INTEGER_CST
8085 && CONVERT_EXPR_CODE_P (code))
8086 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8087
8088 return res;
8089 }
8090
8091 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8092 operands OP0 and OP1. LOC is the location of the resulting expression.
8093 ARG0 and ARG1 are OP0 and OP1 with conversion NOPs stripped.
8094 Return the folded expression if folding is successful. Otherwise,
8095 return NULL_TREE. */
8096 static tree
8097 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8098 tree arg0, tree arg1, tree op0, tree op1)
8099 {
8100 tree tem;
8101
8102 /* We only do these simplifications if we are optimizing. */
8103 if (!optimize)
8104 return NULL_TREE;
8105
8106 /* Check for things like (A || B) && (A || C). We can convert this
8107 to A || (B && C). Note that either operator can be any of the four
8108 truth and/or operations and the transformation will still be
8109 valid. Also note that we only care about order for the
8110 ANDIF and ORIF operators. If B contains side effects, this
8111 might change the truth-value of A. */
8112 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8113 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8114 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8115 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8116 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8117 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8118 {
8119 tree a00 = TREE_OPERAND (arg0, 0);
8120 tree a01 = TREE_OPERAND (arg0, 1);
8121 tree a10 = TREE_OPERAND (arg1, 0);
8122 tree a11 = TREE_OPERAND (arg1, 1);
8123 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8124 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8125 && (code == TRUTH_AND_EXPR
8126 || code == TRUTH_OR_EXPR));
8127
8128 if (operand_equal_p (a00, a10, 0))
8129 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8130 fold_build2_loc (loc, code, type, a01, a11));
8131 else if (commutative && operand_equal_p (a00, a11, 0))
8132 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8133 fold_build2_loc (loc, code, type, a01, a10));
8134 else if (commutative && operand_equal_p (a01, a10, 0))
8135 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8136 fold_build2_loc (loc, code, type, a00, a11));
8137
8138 /* This case is tricky because we must either have commutative
8139 operators or else A10 must not have side-effects. */
8140
8141 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8142 && operand_equal_p (a01, a11, 0))
8143 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8144 fold_build2_loc (loc, code, type, a00, a10),
8145 a01);
8146 }
8147
8148 /* See if we can build a range comparison. */
8149 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8150 return tem;
8151
8152 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8153 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8154 {
8155 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8156 if (tem)
8157 return fold_build2_loc (loc, code, type, tem, arg1);
8158 }
8159
8160 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8161 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8162 {
8163 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8164 if (tem)
8165 return fold_build2_loc (loc, code, type, arg0, tem);
8166 }
8167
8168 /* Check for the possibility of merging component references. If our
8169 lhs is another similar operation, try to merge its rhs with our
8170 rhs. Then try to merge our lhs and rhs. */
8171 if (TREE_CODE (arg0) == code
8172 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8173 TREE_OPERAND (arg0, 1), arg1)))
8174 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8175
8176 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8177 return tem;
8178
8179 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8180 && (code == TRUTH_AND_EXPR
8181 || code == TRUTH_ANDIF_EXPR
8182 || code == TRUTH_OR_EXPR
8183 || code == TRUTH_ORIF_EXPR))
8184 {
8185 enum tree_code ncode, icode;
8186
8187 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8188 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8189 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8190
8191 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8192 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8193 We don't want to pack more than two leaves into a non-IF AND/OR
8194 expression.
8195 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8196 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8197 If the inner right-hand side of the left-hand operand has side
8198 effects, or isn't simple, then we can't add to it, as otherwise
8199 we might destroy the if-sequence. */
8200 if (TREE_CODE (arg0) == icode
8201 && simple_operand_p_2 (arg1)
8202 /* Needed for sequence points, to handle trapping operations
8203 and side effects. */
8204 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8205 {
8206 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8207 arg1);
8208 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8209 tem);
8210 }
8211 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8212 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8213 else if (TREE_CODE (arg1) == icode
8214 && simple_operand_p_2 (arg0)
8215 /* Needed for sequence points, to handle trapping operations
8216 and side effects. */
8217 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8218 {
8219 tem = fold_build2_loc (loc, ncode, type,
8220 arg0, TREE_OPERAND (arg1, 0));
8221 return fold_build2_loc (loc, icode, type, tem,
8222 TREE_OPERAND (arg1, 1));
8223 }
8224 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8225 into (A OR B).
8226 For sequence point consistency, we need to check for trapping
8227 operations and side effects. */
8228 else if (code == icode && simple_operand_p_2 (arg0)
8229 && simple_operand_p_2 (arg1))
8230 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8231 }
8232
8233 return NULL_TREE;
8234 }
8235
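/* Example of the LOGICAL_OP_NON_SHORT_CIRCUIT case above
(illustrative): when both operands are simple and free of side
effects and traps, the short-circuit a != 0 && b != 0 becomes the
non-short-circuiting TRUTH_AND_EXPR a != 0 & b != 0, evaluating
both operands unconditionally and removing a branch. */
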
8236 /* Fold a binary expression of code CODE and type TYPE with operands
8237 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8238 Return the folded expression if folding is successful. Otherwise,
8239 return NULL_TREE. */
8240
8241 static tree
8242 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8243 {
8244 enum tree_code compl_code;
8245
8246 if (code == MIN_EXPR)
8247 compl_code = MAX_EXPR;
8248 else if (code == MAX_EXPR)
8249 compl_code = MIN_EXPR;
8250 else
8251 gcc_unreachable ();
8252
8253 /* MIN (MAX (a, b), b) == b. */
8254 if (TREE_CODE (op0) == compl_code
8255 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8256 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8257
8258 /* MIN (MAX (b, a), b) == b. */
8259 if (TREE_CODE (op0) == compl_code
8260 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8261 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8262 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8263
8264 /* MIN (a, MAX (a, b)) == a. */
8265 if (TREE_CODE (op1) == compl_code
8266 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8267 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8268 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8269
8270 /* MIN (a, MAX (b, a)) == a. */
8271 if (TREE_CODE (op1) == compl_code
8272 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8273 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8274 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8275
8276 return NULL_TREE;
8277 }
8278
8279 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8280 by changing CODE to reduce the magnitude of constants involved in
8281 ARG0 of the comparison.
8282 Returns a canonicalized comparison tree if a simplification was
8283 possible, otherwise returns NULL_TREE.
8284 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8285 valid if signed overflow is undefined. */
8286
8287 static tree
8288 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8289 tree arg0, tree arg1,
8290 bool *strict_overflow_p)
8291 {
8292 enum tree_code code0 = TREE_CODE (arg0);
8293 tree t, cst0 = NULL_TREE;
8294 int sgn0;
8295 bool swap = false;
8296
8297 /* Match A +- CST code arg1 and CST code arg1. We can change the
8298 first form only if overflow is undefined. */
8299 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8300 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8301 /* In principle pointers also have undefined overflow behavior,
8302 but that causes problems elsewhere. */
8303 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8304 && (code0 == MINUS_EXPR
8305 || code0 == PLUS_EXPR)
8306 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8307 || code0 == INTEGER_CST))
8308 return NULL_TREE;
8309
8310 /* Identify the constant in arg0 and its sign. */
8311 if (code0 == INTEGER_CST)
8312 cst0 = arg0;
8313 else
8314 cst0 = TREE_OPERAND (arg0, 1);
8315 sgn0 = tree_int_cst_sgn (cst0);
8316
8317 /* Overflowed constants and zero will cause problems. */
8318 if (integer_zerop (cst0)
8319 || TREE_OVERFLOW (cst0))
8320 return NULL_TREE;
8321
8322 /* See if we can reduce the magnitude of the constant in
8323 arg0 by changing the comparison code. */
8324 if (code0 == INTEGER_CST)
8325 {
8326 /* CST <= arg1 -> CST-1 < arg1. */
8327 if (code == LE_EXPR && sgn0 == 1)
8328 code = LT_EXPR;
8329 /* -CST < arg1 -> -CST-1 <= arg1. */
8330 else if (code == LT_EXPR && sgn0 == -1)
8331 code = LE_EXPR;
8332 /* CST > arg1 -> CST-1 >= arg1. */
8333 else if (code == GT_EXPR && sgn0 == 1)
8334 code = GE_EXPR;
8335 /* -CST >= arg1 -> -CST-1 > arg1. */
8336 else if (code == GE_EXPR && sgn0 == -1)
8337 code = GT_EXPR;
8338 else
8339 return NULL_TREE;
8340 /* arg1 code' CST' might be more canonical. */
8341 swap = true;
8342 }
8343 else
8344 {
8345 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8346 if (code == LT_EXPR
8347 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8348 code = LE_EXPR;
8349 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8350 else if (code == GT_EXPR
8351 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8352 code = GE_EXPR;
8353 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8354 else if (code == LE_EXPR
8355 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8356 code = LT_EXPR;
8357 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8358 else if (code == GE_EXPR
8359 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8360 code = GT_EXPR;
8361 else
8362 return NULL_TREE;
8363 *strict_overflow_p = true;
8364 }
8365
8366 /* Now build the constant reduced in magnitude. But not if that
8367 would produce one outside of its type's range. */
8368 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8369 && ((sgn0 == 1
8370 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8371 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8372 || (sgn0 == -1
8373 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8374 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8375 /* We cannot swap the comparison here as that would cause us to
8376 endlessly recurse. */
8377 return NULL_TREE;
8378
8379 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8380 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8381 if (code0 != INTEGER_CST)
8382 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8383 t = fold_convert (TREE_TYPE (arg1), t);
8384
8385 /* If swapping might yield a more canonical form, do so. */
8386 if (swap)
8387 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8388 else
8389 return fold_build2_loc (loc, code, type, t, arg1);
8390 }
8391
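/* Examples (illustrative, valid only because signed overflow is
undefined): x + 2 > y is rewritten to x + 1 >= y, and the
constant-first form 5 <= y becomes the swapped comparison y > 4;
both reduce the magnitude of the constant by one. */
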
8392 /* Canonicalize the comparison ARG0 CODE ARG1 (with result type TYPE),
8393 exploiting undefined overflow. Try to decrease the magnitude of the
8394 constants involved by changing LE_EXPR and GE_EXPR to LT_EXPR and
8395 GT_EXPR or vice versa, and put sole constants at the second argument position.
8396 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8397
8398 static tree
8399 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8400 tree arg0, tree arg1)
8401 {
8402 tree t;
8403 bool strict_overflow_p;
8404 const char * const warnmsg = G_("assuming signed overflow does not occur "
8405 "when reducing constant in comparison");
8406
8407 /* Try canonicalization by simplifying arg0. */
8408 strict_overflow_p = false;
8409 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8410 &strict_overflow_p);
8411 if (t)
8412 {
8413 if (strict_overflow_p)
8414 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8415 return t;
8416 }
8417
8418 /* Try canonicalization by simplifying arg1 using the swapped
8419 comparison. */
8420 code = swap_tree_comparison (code);
8421 strict_overflow_p = false;
8422 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8423 &strict_overflow_p);
8424 if (t && strict_overflow_p)
8425 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8426 return t;
8427 }
8428
8429 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8430 space. This is used to avoid issuing overflow warnings for
8431 expressions like &p->x, which cannot wrap. */
8432
8433 static bool
8434 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8435 {
8436 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8437 return true;
8438
8439 if (bitpos < 0)
8440 return true;
8441
8442 wide_int wi_offset;
8443 int precision = TYPE_PRECISION (TREE_TYPE (base));
8444 if (offset == NULL_TREE)
8445 wi_offset = wi::zero (precision);
8446 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8447 return true;
8448 else
8449 wi_offset = offset;
8450
8451 bool overflow;
8452 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8453 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8454 if (overflow)
8455 return true;
8456
8457 if (!wi::fits_uhwi_p (total))
8458 return true;
8459
8460 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8461 if (size <= 0)
8462 return true;
8463
8464 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8465 array. */
8466 if (TREE_CODE (base) == ADDR_EXPR)
8467 {
8468 HOST_WIDE_INT base_size;
8469
8470 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8471 if (base_size > 0 && size < base_size)
8472 size = base_size;
8473 }
8474
8475 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8476 }
8477
8478 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8479 kind INTEGER_CST. This makes sure to properly sign-extend the
8480 constant. */
8481
8482 static HOST_WIDE_INT
8483 size_low_cst (const_tree t)
8484 {
8485 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8486 int prec = TYPE_PRECISION (TREE_TYPE (t));
8487 if (prec < HOST_BITS_PER_WIDE_INT)
8488 return sext_hwi (w, prec);
8489 return w;
8490 }
8491
8492 /* Subroutine of fold_binary. This routine performs all of the
8493 transformations that are common to the equality/inequality
8494 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8495 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8496 fold_binary itself should use fold_binary instead. Fold a comparison with
8497 tree code CODE and type TYPE with operands OP0 and OP1. Return
8498 the folded comparison or NULL_TREE. */
8499
8500 static tree
8501 fold_comparison (location_t loc, enum tree_code code, tree type,
8502 tree op0, tree op1)
8503 {
8504 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8505 tree arg0, arg1, tem;
8506
8507 arg0 = op0;
8508 arg1 = op1;
8509
8510 STRIP_SIGN_NOPS (arg0);
8511 STRIP_SIGN_NOPS (arg1);
8512
8513 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8514 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8515 && (equality_code
8516 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8517 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8518 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8519 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8520 && TREE_CODE (arg1) == INTEGER_CST
8521 && !TREE_OVERFLOW (arg1))
8522 {
8523 const enum tree_code
8524 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8525 tree const1 = TREE_OPERAND (arg0, 1);
8526 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8527 tree variable = TREE_OPERAND (arg0, 0);
8528 tree new_const = int_const_binop (reverse_op, const2, const1);
8529
8530 /* If the constant operation overflowed this can be
8531 simplified as a comparison against INT_MAX/INT_MIN. */
8532 if (TREE_OVERFLOW (new_const)
8533 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8534 {
8535 int const1_sgn = tree_int_cst_sgn (const1);
8536 enum tree_code code2 = code;
8537
8538 /* Get the sign of the constant on the lhs if the
8539 operation were VARIABLE + CONST1. */
8540 if (TREE_CODE (arg0) == MINUS_EXPR)
8541 const1_sgn = -const1_sgn;
8542
8543 /* The sign of the constant determines if we overflowed
8544 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8545 Canonicalize to the INT_MIN overflow by swapping the comparison
8546 if necessary. */
8547 if (const1_sgn == -1)
8548 code2 = swap_tree_comparison (code);
8549
8550 /* We now can look at the canonicalized case
8551 VARIABLE + 1 CODE2 INT_MIN
8552 and decide on the result. */
8553 switch (code2)
8554 {
8555 case EQ_EXPR:
8556 case LT_EXPR:
8557 case LE_EXPR:
8558 return
8559 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8560
8561 case NE_EXPR:
8562 case GE_EXPR:
8563 case GT_EXPR:
8564 return
8565 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8566
8567 default:
8568 gcc_unreachable ();
8569 }
8570 }
8571 else
8572 {
8573 if (!equality_code)
8574 fold_overflow_warning ("assuming signed overflow does not occur "
8575 "when changing X +- C1 cmp C2 to "
8576 "X cmp C2 -+ C1",
8577 WARN_STRICT_OVERFLOW_COMPARISON);
8578 return fold_build2_loc (loc, code, type, variable, new_const);
8579 }
8580 }
8581
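/* Example of the transform above (illustrative): x + 20 < 30
becomes x < 10; when computing C2 -+ C1 overflows, as for
x - 1 < INT_MAX, the comparison is decided outright (here: always
true) from the INT_MIN/INT_MAX analysis instead. */
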
8582 /* For comparisons of pointers we can decompose them into a compile time
8583 comparison of the base objects and the offsets into the object.
8584 This requires at least one operand being an ADDR_EXPR or a
8585 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8586 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8587 && (TREE_CODE (arg0) == ADDR_EXPR
8588 || TREE_CODE (arg1) == ADDR_EXPR
8589 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8590 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8591 {
8592 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8593 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8594 machine_mode mode;
8595 int volatilep, unsignedp;
8596 bool indirect_base0 = false, indirect_base1 = false;
8597
8598 /* Get base and offset for the access. Strip ADDR_EXPR for
8599 get_inner_reference, but put it back by stripping INDIRECT_REF
8600 off the base object if possible. indirect_baseN will be true
8601 if baseN is not an address but refers to the object itself. */
8602 base0 = arg0;
8603 if (TREE_CODE (arg0) == ADDR_EXPR)
8604 {
8605 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8606 &bitsize, &bitpos0, &offset0, &mode,
8607 &unsignedp, &volatilep, false);
8608 if (TREE_CODE (base0) == INDIRECT_REF)
8609 base0 = TREE_OPERAND (base0, 0);
8610 else
8611 indirect_base0 = true;
8612 }
8613 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8614 {
8615 base0 = TREE_OPERAND (arg0, 0);
8616 STRIP_SIGN_NOPS (base0);
8617 if (TREE_CODE (base0) == ADDR_EXPR)
8618 {
8619 base0 = TREE_OPERAND (base0, 0);
8620 indirect_base0 = true;
8621 }
8622 offset0 = TREE_OPERAND (arg0, 1);
8623 if (tree_fits_shwi_p (offset0))
8624 {
8625 HOST_WIDE_INT off = size_low_cst (offset0);
8626 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8627 * BITS_PER_UNIT)
8628 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8629 {
8630 bitpos0 = off * BITS_PER_UNIT;
8631 offset0 = NULL_TREE;
8632 }
8633 }
8634 }
8635
8636 base1 = arg1;
8637 if (TREE_CODE (arg1) == ADDR_EXPR)
8638 {
8639 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8640 &bitsize, &bitpos1, &offset1, &mode,
8641 &unsignedp, &volatilep, false);
8642 if (TREE_CODE (base1) == INDIRECT_REF)
8643 base1 = TREE_OPERAND (base1, 0);
8644 else
8645 indirect_base1 = true;
8646 }
8647 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8648 {
8649 base1 = TREE_OPERAND (arg1, 0);
8650 STRIP_SIGN_NOPS (base1);
8651 if (TREE_CODE (base1) == ADDR_EXPR)
8652 {
8653 base1 = TREE_OPERAND (base1, 0);
8654 indirect_base1 = true;
8655 }
8656 offset1 = TREE_OPERAND (arg1, 1);
8657 if (tree_fits_shwi_p (offset1))
8658 {
8659 HOST_WIDE_INT off = size_low_cst (offset1);
8660 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8661 * BITS_PER_UNIT)
8662 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8663 {
8664 bitpos1 = off * BITS_PER_UNIT;
8665 offset1 = NULL_TREE;
8666 }
8667 }
8668 }
8669
8670 /* A local variable can never be pointed to by
8671 the default SSA name of an incoming parameter. */
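/* E.g., in  int f (int *p) { int l; return p == &l; }  comparing
   the parameter's default definition with the address of the local
   folds to false.  */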
8672 if ((TREE_CODE (arg0) == ADDR_EXPR
8673 && indirect_base0
8674 && TREE_CODE (base0) == VAR_DECL
8675 && auto_var_in_fn_p (base0, current_function_decl)
8676 && !indirect_base1
8677 && TREE_CODE (base1) == SSA_NAME
8678 && SSA_NAME_IS_DEFAULT_DEF (base1)
8679 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8680 || (TREE_CODE (arg1) == ADDR_EXPR
8681 && indirect_base1
8682 && TREE_CODE (base1) == VAR_DECL
8683 && auto_var_in_fn_p (base1, current_function_decl)
8684 && !indirect_base0
8685 && TREE_CODE (base0) == SSA_NAME
8686 && SSA_NAME_IS_DEFAULT_DEF (base0)
8687 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8688 {
8689 if (code == NE_EXPR)
8690 return constant_boolean_node (1, type);
8691 else if (code == EQ_EXPR)
8692 return constant_boolean_node (0, type);
8693 }
8694 /* If we have equivalent bases we might be able to simplify. */
8695 else if (indirect_base0 == indirect_base1
8696 && operand_equal_p (base0, base1, 0))
8697 {
8698 /* We can fold this expression to a constant if the non-constant
8699 offset parts are equal. */
8700 if ((offset0 == offset1
8701 || (offset0 && offset1
8702 && operand_equal_p (offset0, offset1, 0)))
8703 && (code == EQ_EXPR
8704 || code == NE_EXPR
8705 || (indirect_base0 && DECL_P (base0))
8706 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8707
8708 {
8709 if (!equality_code
8710 && bitpos0 != bitpos1
8711 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8712 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8713 fold_overflow_warning (("assuming pointer wraparound does not "
8714 "occur when comparing P +- C1 with "
8715 "P +- C2"),
8716 WARN_STRICT_OVERFLOW_CONDITIONAL);
8717
8718 switch (code)
8719 {
8720 case EQ_EXPR:
8721 return constant_boolean_node (bitpos0 == bitpos1, type);
8722 case NE_EXPR:
8723 return constant_boolean_node (bitpos0 != bitpos1, type);
8724 case LT_EXPR:
8725 return constant_boolean_node (bitpos0 < bitpos1, type);
8726 case LE_EXPR:
8727 return constant_boolean_node (bitpos0 <= bitpos1, type);
8728 case GE_EXPR:
8729 return constant_boolean_node (bitpos0 >= bitpos1, type);
8730 case GT_EXPR:
8731 return constant_boolean_node (bitpos0 > bitpos1, type);
8732 default:;
8733 }
8734 }
8735 /* We can simplify the comparison to a comparison of the variable
8736 offset parts if the constant offset parts are equal.
8737 Be careful to use signed sizetype here because otherwise we
8738 mess with array offsets in the wrong way. This is possible
8739 because pointer arithmetic is restricted to remain within an
8740 object and overflow on pointer differences is undefined as of
8741 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
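/* E.g., with equal constant parts, &p->a[i] < &p->a[j] folds to a
   signed comparison of the variable offset parts built from i and
   j.  */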
8742 else if (bitpos0 == bitpos1
8743 && (equality_code
8744 || (indirect_base0 && DECL_P (base0))
8745 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8746 {
8747 /* By converting to signed sizetype we cover middle-end pointer
8748 arithmetic, which operates on unsigned pointer types of sizetype
8749 width, and ARRAY_REF offsets, which are properly sign- or
8750 zero-extended from their type in case it is narrower than
8751 sizetype. */
8752 if (offset0 == NULL_TREE)
8753 offset0 = build_int_cst (ssizetype, 0);
8754 else
8755 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8756 if (offset1 == NULL_TREE)
8757 offset1 = build_int_cst (ssizetype, 0);
8758 else
8759 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8760
8761 if (!equality_code
8762 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8763 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8764 fold_overflow_warning (("assuming pointer wraparound does not "
8765 "occur when comparing P +- C1 with "
8766 "P +- C2"),
8767 WARN_STRICT_OVERFLOW_COMPARISON);
8768
8769 return fold_build2_loc (loc, code, type, offset0, offset1);
8770 }
8771 }
8772 /* For non-equal bases we can simplify if they are addresses of
8773 declarations with different addresses. */
8774 else if (indirect_base0 && indirect_base1
8775 /* We know that !operand_equal_p (base0, base1, 0)
8776 because the if condition was false. But make
8777 sure the two decls are not the same. */
8778 && base0 != base1
8779 && TREE_CODE (arg0) == ADDR_EXPR
8780 && TREE_CODE (arg1) == ADDR_EXPR
8781 && DECL_P (base0)
8782 && DECL_P (base1)
8783 /* Watch for aliases. */
8784 && (!decl_in_symtab_p (base0)
8785 || !decl_in_symtab_p (base1)
8786 || !symtab_node::get_create (base0)->equal_address_to
8787 (symtab_node::get_create (base1))))
8788 {
8789 if (code == EQ_EXPR)
8790 return omit_two_operands_loc (loc, type, boolean_false_node,
8791 arg0, arg1);
8792 else if (code == NE_EXPR)
8793 return omit_two_operands_loc (loc, type, boolean_true_node,
8794 arg0, arg1);
8795 }
8796 /* For equal offsets we can simplify to a comparison of the
8797 base addresses. */
8798 else if (bitpos0 == bitpos1
8799 && (indirect_base0
8800 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8801 && (indirect_base1
8802 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8803 && ((offset0 == offset1)
8804 || (offset0 && offset1
8805 && operand_equal_p (offset0, offset1, 0))))
8806 {
8807 if (indirect_base0)
8808 base0 = build_fold_addr_expr_loc (loc, base0);
8809 if (indirect_base1)
8810 base1 = build_fold_addr_expr_loc (loc, base1);
8811 return fold_build2_loc (loc, code, type, base0, base1);
8812 }
8813 }
8814
8815 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8816 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8817 the resulting offset is smaller in absolute value than the
8818 original one and has the same sign. */
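/* E.g., X + 3 < Y + 5 becomes X < Y + 2: the combined constant 2
   is smaller in absolute value than the original 5 and has the
   same sign.  */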
8819 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8820 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8821 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8822 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8823 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8824 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8825 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8826 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8827 {
8828 tree const1 = TREE_OPERAND (arg0, 1);
8829 tree const2 = TREE_OPERAND (arg1, 1);
8830 tree variable1 = TREE_OPERAND (arg0, 0);
8831 tree variable2 = TREE_OPERAND (arg1, 0);
8832 tree cst;
8833 const char * const warnmsg = G_("assuming signed overflow does not "
8834 "occur when combining constants around "
8835 "a comparison");
8836
8837 /* Put the constant on the side where it doesn't overflow and is
8838 of lower absolute value and of the same sign as before. */
8839 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8840 ? MINUS_EXPR : PLUS_EXPR,
8841 const2, const1);
8842 if (!TREE_OVERFLOW (cst)
8843 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8844 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8845 {
8846 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8847 return fold_build2_loc (loc, code, type,
8848 variable1,
8849 fold_build2_loc (loc, TREE_CODE (arg1),
8850 TREE_TYPE (arg1),
8851 variable2, cst));
8852 }
8853
8854 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8855 ? MINUS_EXPR : PLUS_EXPR,
8856 const1, const2);
8857 if (!TREE_OVERFLOW (cst)
8858 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8859 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8860 {
8861 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8862 return fold_build2_loc (loc, code, type,
8863 fold_build2_loc (loc, TREE_CODE (arg0),
8864 TREE_TYPE (arg0),
8865 variable1, cst),
8866 variable2);
8867 }
8868 }
8869
8870 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8871 if (tem)
8872 return tem;
8873
8874 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8875 && CONVERT_EXPR_P (arg0))
8876 {
8877 /* If we are widening one operand of an integer comparison,
8878 see if the other operand is similarly being widened. Perhaps we
8879 can do the comparison in the narrower type. */
8880 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
8881 if (tem)
8882 return tem;
8883
8884 /* Or if we are changing signedness. */
8885 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
8886 if (tem)
8887 return tem;
8888 }
8889
8890 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8891 constant, we can simplify it. */
8892 if (TREE_CODE (arg1) == INTEGER_CST
8893 && (TREE_CODE (arg0) == MIN_EXPR
8894 || TREE_CODE (arg0) == MAX_EXPR)
8895 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8896 {
8897 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8898 if (tem)
8899 return tem;
8900 }
8901
8902 /* If we are comparing an expression that just has comparisons
8903 of two integer values, arithmetic expressions of those comparisons,
8904 and constants, we can simplify it. There are only three cases
8905 to check: the two values can either be equal, the first can be
8906 greater, or the second can be greater. Fold the expression for
8907 those three values. Since each value must be 0 or 1, we have
8908 eight possibilities, each of which corresponds to the constant 0
8909 or 1 or one of the six possible comparisons.
8910
8911 This handles common cases like (a > b) == 0 but also handles
8912 expressions like ((x > y) - (y > x)) > 0, which supposedly
8913 occur in macroized code. */
8914
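/* E.g., (a > b) == 0 folds to a <= b, and ((x > y) - (y > x)) > 0
   folds to x > y, by evaluating the expression for each of the
   three possible orderings of the operands.  */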
8915 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8916 {
8917 tree cval1 = 0, cval2 = 0;
8918 int save_p = 0;
8919
8920 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8921 /* Don't handle degenerate cases here; they should already
8922 have been handled anyway. */
8923 && cval1 != 0 && cval2 != 0
8924 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8925 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8926 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8927 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8928 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8929 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8930 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8931 {
8932 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8933 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8934
8935 /* We can't just pass T to eval_subst in case cval1 or cval2
8936 was the same as ARG1. */
8937
8938 tree high_result
8939 = fold_build2_loc (loc, code, type,
8940 eval_subst (loc, arg0, cval1, maxval,
8941 cval2, minval),
8942 arg1);
8943 tree equal_result
8944 = fold_build2_loc (loc, code, type,
8945 eval_subst (loc, arg0, cval1, maxval,
8946 cval2, maxval),
8947 arg1);
8948 tree low_result
8949 = fold_build2_loc (loc, code, type,
8950 eval_subst (loc, arg0, cval1, minval,
8951 cval2, maxval),
8952 arg1);
8953
8954 /* All three of these results should be 0 or 1. Confirm they are.
8955 Then use those values to select the proper code to use. */
8956
8957 if (TREE_CODE (high_result) == INTEGER_CST
8958 && TREE_CODE (equal_result) == INTEGER_CST
8959 && TREE_CODE (low_result) == INTEGER_CST)
8960 {
8961 /* Make a 3-bit mask with the high-order bit being the
8962 value for `>', the next for `=', and the low for `<'. */
8963 switch ((integer_onep (high_result) * 4)
8964 + (integer_onep (equal_result) * 2)
8965 + integer_onep (low_result))
8966 {
8967 case 0:
8968 /* Always false. */
8969 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8970 case 1:
8971 code = LT_EXPR;
8972 break;
8973 case 2:
8974 code = EQ_EXPR;
8975 break;
8976 case 3:
8977 code = LE_EXPR;
8978 break;
8979 case 4:
8980 code = GT_EXPR;
8981 break;
8982 case 5:
8983 code = NE_EXPR;
8984 break;
8985 case 6:
8986 code = GE_EXPR;
8987 break;
8988 case 7:
8989 /* Always true. */
8990 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8991 }
8992
8993 if (save_p)
8994 {
8995 tem = save_expr (build2 (code, type, cval1, cval2));
8996 SET_EXPR_LOCATION (tem, loc);
8997 return tem;
8998 }
8999 return fold_build2_loc (loc, code, type, cval1, cval2);
9000 }
9001 }
9002 }
9003
9004 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9005 into a single range test. */
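/* E.g., for signed X, the comparison X / 4 == 2 folds to a range
   test for 8 <= X && X <= 11.  */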
9006 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9007 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9008 && TREE_CODE (arg1) == INTEGER_CST
9009 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9010 && !integer_zerop (TREE_OPERAND (arg0, 1))
9011 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9012 && !TREE_OVERFLOW (arg1))
9013 {
9014 tem = fold_div_compare (loc, code, type, arg0, arg1);
9015 if (tem != NULL_TREE)
9016 return tem;
9017 }
9018
9019 return NULL_TREE;
9020 }
9021
9022
9023 /* Subroutine of fold_binary. Optimize complex multiplications of the
9024 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9025 argument EXPR represents the expression "z" of type TYPE. */
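/* E.g., for z = a + b*i the result is the complex expression
   (a*a + b*b) + 0*i.  */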
9026
9027 static tree
9028 fold_mult_zconjz (location_t loc, tree type, tree expr)
9029 {
9030 tree itype = TREE_TYPE (type);
9031 tree rpart, ipart, tem;
9032
9033 if (TREE_CODE (expr) == COMPLEX_EXPR)
9034 {
9035 rpart = TREE_OPERAND (expr, 0);
9036 ipart = TREE_OPERAND (expr, 1);
9037 }
9038 else if (TREE_CODE (expr) == COMPLEX_CST)
9039 {
9040 rpart = TREE_REALPART (expr);
9041 ipart = TREE_IMAGPART (expr);
9042 }
9043 else
9044 {
9045 expr = save_expr (expr);
9046 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9047 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9048 }
9049
9050 rpart = save_expr (rpart);
9051 ipart = save_expr (ipart);
9052 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9053 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9054 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9055 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9056 build_zero_cst (itype));
9057 }
9058
9059
9060 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9061 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9062
9063 static bool
9064 vec_cst_ctor_to_array (tree arg, tree *elts)
9065 {
9066 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9067
9068 if (TREE_CODE (arg) == VECTOR_CST)
9069 {
9070 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9071 elts[i] = VECTOR_CST_ELT (arg, i);
9072 }
9073 else if (TREE_CODE (arg) == CONSTRUCTOR)
9074 {
9075 constructor_elt *elt;
9076
9077 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9078 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9079 return false;
9080 else
9081 elts[i] = elt->value;
9082 }
9083 else
9084 return false;
9085 for (; i < nelts; i++)
9086 elts[i]
9087 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9088 return true;
9089 }
9090
9091 /* Attempt to fold a vector permutation of vectors ARG0 and ARG1 using the
9092 SEL selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9093 NULL_TREE otherwise. */
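/* E.g., with four-element vectors, SEL = { 0, 4, 1, 5 } interleaves
   the low halves of the two inputs: indexes 0..3 select elements of
   ARG0, indexes 4..7 elements of ARG1.  */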
9094
9095 static tree
9096 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9097 {
9098 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9099 tree *elts;
9100 bool need_ctor = false;
9101
9102 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9103 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9104 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9105 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9106 return NULL_TREE;
9107
9108 elts = XALLOCAVEC (tree, nelts * 3);
9109 if (!vec_cst_ctor_to_array (arg0, elts)
9110 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9111 return NULL_TREE;
9112
9113 for (i = 0; i < nelts; i++)
9114 {
9115 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9116 need_ctor = true;
9117 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9118 }
9119
9120 if (need_ctor)
9121 {
9122 vec<constructor_elt, va_gc> *v;
9123 vec_alloc (v, nelts);
9124 for (i = 0; i < nelts; i++)
9125 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9126 return build_constructor (type, v);
9127 }
9128 else
9129 return build_vector (type, &elts[2 * nelts]);
9130 }
9131
9132 /* Try to fold a pointer difference of type TYPE between two address
9133 expressions of array references AREF0 and AREF1 using location LOC.
9134 Return a simplified expression for the difference or NULL_TREE. */
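/* E.g., given  int a[8];  the difference &a[i] - &a[j] yields the
   byte offset (i - j) * 4, assuming a 4-byte int; dividing back to
   an element count is left to the caller.  */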
9135
9136 static tree
9137 fold_addr_of_array_ref_difference (location_t loc, tree type,
9138 tree aref0, tree aref1)
9139 {
9140 tree base0 = TREE_OPERAND (aref0, 0);
9141 tree base1 = TREE_OPERAND (aref1, 0);
9142 tree base_offset = build_int_cst (type, 0);
9143
9144 /* If the bases are array references as well, recurse. If the bases
9145 are pointer indirections compute the difference of the pointers.
9146 If the bases are equal, we are set. */
9147 if ((TREE_CODE (base0) == ARRAY_REF
9148 && TREE_CODE (base1) == ARRAY_REF
9149 && (base_offset
9150 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9151 || (INDIRECT_REF_P (base0)
9152 && INDIRECT_REF_P (base1)
9153 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9154 TREE_OPERAND (base0, 0),
9155 TREE_OPERAND (base1, 0))))
9156 || operand_equal_p (base0, base1, 0))
9157 {
9158 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9159 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9160 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9161 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9162 return fold_build2_loc (loc, PLUS_EXPR, type,
9163 base_offset,
9164 fold_build2_loc (loc, MULT_EXPR, type,
9165 diff, esz));
9166 }
9167 return NULL_TREE;
9168 }
9169
9170 /* If the real or vector real constant CST of type TYPE has an exact
9171 inverse, return it, else return NULL. */
9172
9173 tree
9174 exact_inverse (tree type, tree cst)
9175 {
9176 REAL_VALUE_TYPE r;
9177 tree unit_type, *elts;
9178 machine_mode mode;
9179 unsigned vec_nelts, i;
9180
9181 switch (TREE_CODE (cst))
9182 {
9183 case REAL_CST:
9184 r = TREE_REAL_CST (cst);
9185
9186 if (exact_real_inverse (TYPE_MODE (type), &r))
9187 return build_real (type, r);
9188
9189 return NULL_TREE;
9190
9191 case VECTOR_CST:
9192 vec_nelts = VECTOR_CST_NELTS (cst);
9193 elts = XALLOCAVEC (tree, vec_nelts);
9194 unit_type = TREE_TYPE (type);
9195 mode = TYPE_MODE (unit_type);
9196
9197 for (i = 0; i < vec_nelts; i++)
9198 {
9199 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9200 if (!exact_real_inverse (mode, &r))
9201 return NULL_TREE;
9202 elts[i] = build_real (unit_type, r);
9203 }
9204
9205 return build_vector (type, elts);
9206
9207 default:
9208 return NULL_TREE;
9209 }
9210 }
9211
9212 /* Mask out the tz least significant bits of X of type TYPE where
9213 tz is the number of trailing zeroes in Y. */
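/* E.g., X = 0b1011 and Y = 0b0100 (two trailing zeroes) yield
   0b1000.  */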
9214 static wide_int
9215 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9216 {
9217 int tz = wi::ctz (y);
9218 if (tz > 0)
9219 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9220 return x;
9221 }
9222
9223 /* Return true when T is an address and is known to be nonzero.
9224 For floating point we further ensure that T is not denormal.
9225 Similar logic is present in nonzero_address in rtlanal.h.
9226
9227 If the return value is based on the assumption that signed overflow
9228 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9229 change *STRICT_OVERFLOW_P. */
9230
9231 static bool
9232 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9233 {
9234 tree type = TREE_TYPE (t);
9235 enum tree_code code;
9236
9237 /* Doing something useful for floating point would need more work. */
9238 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9239 return false;
9240
9241 code = TREE_CODE (t);
9242 switch (TREE_CODE_CLASS (code))
9243 {
9244 case tcc_unary:
9245 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9246 strict_overflow_p);
9247 case tcc_binary:
9248 case tcc_comparison:
9249 return tree_binary_nonzero_warnv_p (code, type,
9250 TREE_OPERAND (t, 0),
9251 TREE_OPERAND (t, 1),
9252 strict_overflow_p);
9253 case tcc_constant:
9254 case tcc_declaration:
9255 case tcc_reference:
9256 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9257
9258 default:
9259 break;
9260 }
9261
9262 switch (code)
9263 {
9264 case TRUTH_NOT_EXPR:
9265 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9266 strict_overflow_p);
9267
9268 case TRUTH_AND_EXPR:
9269 case TRUTH_OR_EXPR:
9270 case TRUTH_XOR_EXPR:
9271 return tree_binary_nonzero_warnv_p (code, type,
9272 TREE_OPERAND (t, 0),
9273 TREE_OPERAND (t, 1),
9274 strict_overflow_p);
9275
9276 case COND_EXPR:
9277 case CONSTRUCTOR:
9278 case OBJ_TYPE_REF:
9279 case ASSERT_EXPR:
9280 case ADDR_EXPR:
9281 case WITH_SIZE_EXPR:
9282 case SSA_NAME:
9283 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9284
9285 case COMPOUND_EXPR:
9286 case MODIFY_EXPR:
9287 case BIND_EXPR:
9288 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9289 strict_overflow_p);
9290
9291 case SAVE_EXPR:
9292 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9293 strict_overflow_p);
9294
9295 case CALL_EXPR:
9296 {
9297 tree fndecl = get_callee_fndecl (t);
9298 if (!fndecl) return false;
9299 if (flag_delete_null_pointer_checks && !flag_check_new
9300 && DECL_IS_OPERATOR_NEW (fndecl)
9301 && !TREE_NOTHROW (fndecl))
9302 return true;
9303 if (flag_delete_null_pointer_checks
9304 && lookup_attribute ("returns_nonnull",
9305 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9306 return true;
9307 return alloca_call_p (t);
9308 }
9309
9310 default:
9311 break;
9312 }
9313 return false;
9314 }
9315
9316 /* Return true when T is an address and is known to be nonzero.
9317 Handle warnings about undefined signed overflow. */
9318
9319 static bool
9320 tree_expr_nonzero_p (tree t)
9321 {
9322 bool ret, strict_overflow_p;
9323
9324 strict_overflow_p = false;
9325 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9326 if (strict_overflow_p)
9327 fold_overflow_warning (("assuming signed overflow does not occur when "
9328 "determining that expression is always "
9329 "non-zero"),
9330 WARN_STRICT_OVERFLOW_MISC);
9331 return ret;
9332 }
9333
9334 /* Fold a binary expression of code CODE and type TYPE with operands
9335 OP0 and OP1. LOC is the location of the resulting expression.
9336 Return the folded expression if folding is successful. Otherwise,
9337 return NULL_TREE. */
9338
9339 tree
9340 fold_binary_loc (location_t loc,
9341 enum tree_code code, tree type, tree op0, tree op1)
9342 {
9343 enum tree_code_class kind = TREE_CODE_CLASS (code);
9344 tree arg0, arg1, tem;
9345 tree t1 = NULL_TREE;
9346 bool strict_overflow_p;
9347 unsigned int prec;
9348
9349 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9350 && TREE_CODE_LENGTH (code) == 2
9351 && op0 != NULL_TREE
9352 && op1 != NULL_TREE);
9353
9354 arg0 = op0;
9355 arg1 = op1;
9356
9357 /* Strip any conversions that don't change the mode. This is
9358 safe for every expression, except for a comparison expression
9359 because its signedness is derived from its operands. So, in
9360 the latter case, only strip conversions that don't change the
9361 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9362 preserved.
9363
9364 Note that this is done as an internal manipulation within the
9365 constant folder, in order to find the simplest representation
9366 of the arguments so that their form can be studied. In any
9367 case, the appropriate type conversions should be put back in
9368 the tree that will get out of the constant folder. */
9369
9370 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9371 {
9372 STRIP_SIGN_NOPS (arg0);
9373 STRIP_SIGN_NOPS (arg1);
9374 }
9375 else
9376 {
9377 STRIP_NOPS (arg0);
9378 STRIP_NOPS (arg1);
9379 }
9380
9381 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9382 constant but we can't do arithmetic on them. */
9383 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9384 {
9385 tem = const_binop (code, type, arg0, arg1);
9386 if (tem != NULL_TREE)
9387 {
9388 if (TREE_TYPE (tem) != type)
9389 tem = fold_convert_loc (loc, type, tem);
9390 return tem;
9391 }
9392 }
9393
9394 /* If this is a commutative operation, and ARG0 is a constant, move it
9395 to ARG1 to reduce the number of tests below. */
9396 if (commutative_tree_code (code)
9397 && tree_swap_operands_p (arg0, arg1, true))
9398 return fold_build2_loc (loc, code, type, op1, op0);
9399
9400 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9401 to ARG1 to reduce the number of tests below. */
9402 if (kind == tcc_comparison
9403 && tree_swap_operands_p (arg0, arg1, true))
9404 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9405
9406 tem = generic_simplify (loc, code, type, op0, op1);
9407 if (tem)
9408 return tem;
9409
9410 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9411
9412 First check for cases where an arithmetic operation is applied to a
9413 compound, conditional, or comparison operation. Push the arithmetic
9414 operation inside the compound or conditional to see if any folding
9415 can then be done. Convert comparison to conditional for this purpose.
9416 This also optimizes non-constant cases that used to be done in
9417 expand_expr.
9418
9419 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9420 where one of the operands is a comparison and the other is a comparison,
9421 a BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9422 code below would make the expression more complex. Change it to a
9423 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9424 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9425
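/* E.g., (a < b) & (c < d) becomes the TRUTH_AND_EXPR
   (a < b) && (c < d), and (a < b) == (c < d) becomes the inversion
   of (a < b) ^ (c < d).  */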
9426 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9427 || code == EQ_EXPR || code == NE_EXPR)
9428 && TREE_CODE (type) != VECTOR_TYPE
9429 && ((truth_value_p (TREE_CODE (arg0))
9430 && (truth_value_p (TREE_CODE (arg1))
9431 || (TREE_CODE (arg1) == BIT_AND_EXPR
9432 && integer_onep (TREE_OPERAND (arg1, 1)))))
9433 || (truth_value_p (TREE_CODE (arg1))
9434 && (truth_value_p (TREE_CODE (arg0))
9435 || (TREE_CODE (arg0) == BIT_AND_EXPR
9436 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9437 {
9438 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9439 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9440 : TRUTH_XOR_EXPR,
9441 boolean_type_node,
9442 fold_convert_loc (loc, boolean_type_node, arg0),
9443 fold_convert_loc (loc, boolean_type_node, arg1));
9444
9445 if (code == EQ_EXPR)
9446 tem = invert_truthvalue_loc (loc, tem);
9447
9448 return fold_convert_loc (loc, type, tem);
9449 }
9450
9451 if (TREE_CODE_CLASS (code) == tcc_binary
9452 || TREE_CODE_CLASS (code) == tcc_comparison)
9453 {
9454 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9455 {
9456 tem = fold_build2_loc (loc, code, type,
9457 fold_convert_loc (loc, TREE_TYPE (op0),
9458 TREE_OPERAND (arg0, 1)), op1);
9459 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9460 tem);
9461 }
9462 if (TREE_CODE (arg1) == COMPOUND_EXPR
9463 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9464 {
9465 tem = fold_build2_loc (loc, code, type, op0,
9466 fold_convert_loc (loc, TREE_TYPE (op1),
9467 TREE_OPERAND (arg1, 1)));
9468 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9469 tem);
9470 }
9471
9472 if (TREE_CODE (arg0) == COND_EXPR
9473 || TREE_CODE (arg0) == VEC_COND_EXPR
9474 || COMPARISON_CLASS_P (arg0))
9475 {
9476 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9477 arg0, arg1,
9478 /*cond_first_p=*/1);
9479 if (tem != NULL_TREE)
9480 return tem;
9481 }
9482
9483 if (TREE_CODE (arg1) == COND_EXPR
9484 || TREE_CODE (arg1) == VEC_COND_EXPR
9485 || COMPARISON_CLASS_P (arg1))
9486 {
9487 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9488 arg1, arg0,
9489 /*cond_first_p=*/0);
9490 if (tem != NULL_TREE)
9491 return tem;
9492 }
9493 }
9494
9495 switch (code)
9496 {
9497 case MEM_REF:
9498 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9499 if (TREE_CODE (arg0) == ADDR_EXPR
9500 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9501 {
9502 tree iref = TREE_OPERAND (arg0, 0);
9503 return fold_build2 (MEM_REF, type,
9504 TREE_OPERAND (iref, 0),
9505 int_const_binop (PLUS_EXPR, arg1,
9506 TREE_OPERAND (iref, 1)));
9507 }
9508
9509 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9510 if (TREE_CODE (arg0) == ADDR_EXPR
9511 && handled_component_p (TREE_OPERAND (arg0, 0)))
9512 {
9513 tree base;
9514 HOST_WIDE_INT coffset;
9515 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9516 &coffset);
9517 if (!base)
9518 return NULL_TREE;
9519 return fold_build2 (MEM_REF, type,
9520 build_fold_addr_expr (base),
9521 int_const_binop (PLUS_EXPR, arg1,
9522 size_int (coffset)));
9523 }
9524
9525 return NULL_TREE;
9526
9527 case POINTER_PLUS_EXPR:
9528 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9529 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9530 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9531 return fold_convert_loc (loc, type,
9532 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9533 fold_convert_loc (loc, sizetype,
9534 arg1),
9535 fold_convert_loc (loc, sizetype,
9536 arg0)));
9537
9538 return NULL_TREE;
9539
9540 case PLUS_EXPR:
9541 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9542 {
9543 /* X + (X / CST) * -CST is X % CST. */
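/* E.g., X = 13, CST = 5: 13 + (13 / 5) * -5 = 13 - 10 = 3,
   which equals 13 % 5.  */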
9544 if (TREE_CODE (arg1) == MULT_EXPR
9545 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9546 && operand_equal_p (arg0,
9547 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9548 {
9549 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9550 tree cst1 = TREE_OPERAND (arg1, 1);
9551 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9552 cst1, cst0);
9553 if (sum && integer_zerop (sum))
9554 return fold_convert_loc (loc, type,
9555 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9556 TREE_TYPE (arg0), arg0,
9557 cst0));
9558 }
9559 }
9560
9561 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9562 one. Make sure the type is not saturating and has the signedness of
9563 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9564 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9565 if ((TREE_CODE (arg0) == MULT_EXPR
9566 || TREE_CODE (arg1) == MULT_EXPR)
9567 && !TYPE_SATURATING (type)
9568 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9569 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9570 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9571 {
9572 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9573 if (tem)
9574 return tem;
9575 }
9576
9577 if (! FLOAT_TYPE_P (type))
9578 {
9579 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9580 with a constant, and the two constants have no bits in common,
9581 we should treat this as a BIT_IOR_EXPR since this may produce more
9582 simplifications. */
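/* E.g., (X & 1) + (Y & 2) becomes (X & 1) | (Y & 2): the masks
   share no bits, so the addition can never carry.  */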
9583 if (TREE_CODE (arg0) == BIT_AND_EXPR
9584 && TREE_CODE (arg1) == BIT_AND_EXPR
9585 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9586 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9587 && wi::bit_and (TREE_OPERAND (arg0, 1),
9588 TREE_OPERAND (arg1, 1)) == 0)
9589 {
9590 code = BIT_IOR_EXPR;
9591 goto bit_ior;
9592 }
9593
9594 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9595 (plus (plus (mult) (mult)) (foo)) so that we can
9596 take advantage of the factoring cases below. */
9597 if (ANY_INTEGRAL_TYPE_P (type)
9598 && TYPE_OVERFLOW_WRAPS (type)
9599 && (((TREE_CODE (arg0) == PLUS_EXPR
9600 || TREE_CODE (arg0) == MINUS_EXPR)
9601 && TREE_CODE (arg1) == MULT_EXPR)
9602 || ((TREE_CODE (arg1) == PLUS_EXPR
9603 || TREE_CODE (arg1) == MINUS_EXPR)
9604 && TREE_CODE (arg0) == MULT_EXPR)))
9605 {
9606 tree parg0, parg1, parg, marg;
9607 enum tree_code pcode;
9608
9609 if (TREE_CODE (arg1) == MULT_EXPR)
9610 parg = arg0, marg = arg1;
9611 else
9612 parg = arg1, marg = arg0;
9613 pcode = TREE_CODE (parg);
9614 parg0 = TREE_OPERAND (parg, 0);
9615 parg1 = TREE_OPERAND (parg, 1);
9616 STRIP_NOPS (parg0);
9617 STRIP_NOPS (parg1);
9618
9619 if (TREE_CODE (parg0) == MULT_EXPR
9620 && TREE_CODE (parg1) != MULT_EXPR)
9621 return fold_build2_loc (loc, pcode, type,
9622 fold_build2_loc (loc, PLUS_EXPR, type,
9623 fold_convert_loc (loc, type,
9624 parg0),
9625 fold_convert_loc (loc, type,
9626 marg)),
9627 fold_convert_loc (loc, type, parg1));
9628 if (TREE_CODE (parg0) != MULT_EXPR
9629 && TREE_CODE (parg1) == MULT_EXPR)
9630 return
9631 fold_build2_loc (loc, PLUS_EXPR, type,
9632 fold_convert_loc (loc, type, parg0),
9633 fold_build2_loc (loc, pcode, type,
9634 fold_convert_loc (loc, type, marg),
9635 fold_convert_loc (loc, type,
9636 parg1)));
9637 }
9638 }
9639 else
9640 {
9641 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9642 to __complex__ ( x, y ). This is not the same for SNaNs or
9643 if signed zeros are involved. */
9644 if (!HONOR_SNANS (element_mode (arg0))
9645 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9646 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9647 {
9648 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9649 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9650 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9651 bool arg0rz = false, arg0iz = false;
9652 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9653 || (arg0i && (arg0iz = real_zerop (arg0i))))
9654 {
9655 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9656 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9657 if (arg0rz && arg1i && real_zerop (arg1i))
9658 {
9659 tree rp = arg1r ? arg1r
9660 : build1 (REALPART_EXPR, rtype, arg1);
9661 tree ip = arg0i ? arg0i
9662 : build1 (IMAGPART_EXPR, rtype, arg0);
9663 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9664 }
9665 else if (arg0iz && arg1r && real_zerop (arg1r))
9666 {
9667 tree rp = arg0r ? arg0r
9668 : build1 (REALPART_EXPR, rtype, arg0);
9669 tree ip = arg1i ? arg1i
9670 : build1 (IMAGPART_EXPR, rtype, arg1);
9671 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9672 }
9673 }
9674 }
9675
9676 if (flag_unsafe_math_optimizations
9677 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9678 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9679 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9680 return tem;
9681
9682 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9683 We associate floats only if the user has specified
9684 -fassociative-math. */
9685 if (flag_associative_math
9686 && TREE_CODE (arg1) == PLUS_EXPR
9687 && TREE_CODE (arg0) != MULT_EXPR)
9688 {
9689 tree tree10 = TREE_OPERAND (arg1, 0);
9690 tree tree11 = TREE_OPERAND (arg1, 1);
9691 if (TREE_CODE (tree11) == MULT_EXPR
9692 && TREE_CODE (tree10) == MULT_EXPR)
9693 {
9694 tree tree0;
9695 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9696 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9697 }
9698 }
9699 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9700 We associate floats only if the user has specified
9701 -fassociative-math. */
9702 if (flag_associative_math
9703 && TREE_CODE (arg0) == PLUS_EXPR
9704 && TREE_CODE (arg1) != MULT_EXPR)
9705 {
9706 tree tree00 = TREE_OPERAND (arg0, 0);
9707 tree tree01 = TREE_OPERAND (arg0, 1);
9708 if (TREE_CODE (tree01) == MULT_EXPR
9709 && TREE_CODE (tree00) == MULT_EXPR)
9710 {
9711 tree tree0;
9712 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9713 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9714 }
9715 }
9716 }
9717
9718 bit_rotate:
9719 /* (A << C1) + (A >> C2) is a rotate of A by C1 bits if A is unsigned
9720 and C1+C2 is the size of A. */
9721 /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits if A is unsigned
9722 and Z is the size of A. */
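/* E.g., for unsigned 32-bit A, both (A << 3) + (A >> 29) and
   (A << B) + (A >> (32 - B)) become left rotates.  */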
9723 {
9724 enum tree_code code0, code1;
9725 tree rtype;
9726 code0 = TREE_CODE (arg0);
9727 code1 = TREE_CODE (arg1);
9728 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9729 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9730 && operand_equal_p (TREE_OPERAND (arg0, 0),
9731 TREE_OPERAND (arg1, 0), 0)
9732 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9733 TYPE_UNSIGNED (rtype))
9734 /* Only create rotates in complete modes. Other cases are not
9735 expanded properly. */
9736 && (element_precision (rtype)
9737 == element_precision (TYPE_MODE (rtype))))
9738 {
9739 tree tree01, tree11;
9740 enum tree_code code01, code11;
9741
9742 tree01 = TREE_OPERAND (arg0, 1);
9743 tree11 = TREE_OPERAND (arg1, 1);
9744 STRIP_NOPS (tree01);
9745 STRIP_NOPS (tree11);
9746 code01 = TREE_CODE (tree01);
9747 code11 = TREE_CODE (tree11);
9748 if (code01 == INTEGER_CST
9749 && code11 == INTEGER_CST
9750 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9751 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9752 {
9753 tem = build2_loc (loc, LROTATE_EXPR,
9754 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9755 TREE_OPERAND (arg0, 0),
9756 code0 == LSHIFT_EXPR
9757 ? TREE_OPERAND (arg0, 1)
9758 : TREE_OPERAND (arg1, 1));
9759 return fold_convert_loc (loc, type, tem);
9760 }
9761 else if (code11 == MINUS_EXPR)
9762 {
9763 tree tree110, tree111;
9764 tree110 = TREE_OPERAND (tree11, 0);
9765 tree111 = TREE_OPERAND (tree11, 1);
9766 STRIP_NOPS (tree110);
9767 STRIP_NOPS (tree111);
9768 if (TREE_CODE (tree110) == INTEGER_CST
9769 && 0 == compare_tree_int (tree110,
9770 element_precision
9771 (TREE_TYPE (TREE_OPERAND
9772 (arg0, 0))))
9773 && operand_equal_p (tree01, tree111, 0))
9774 return
9775 fold_convert_loc (loc, type,
9776 build2 ((code0 == LSHIFT_EXPR
9777 ? LROTATE_EXPR
9778 : RROTATE_EXPR),
9779 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9780 TREE_OPERAND (arg0, 0),
9781 TREE_OPERAND (arg0, 1)));
9782 }
9783 else if (code01 == MINUS_EXPR)
9784 {
9785 tree tree010, tree011;
9786 tree010 = TREE_OPERAND (tree01, 0);
9787 tree011 = TREE_OPERAND (tree01, 1);
9788 STRIP_NOPS (tree010);
9789 STRIP_NOPS (tree011);
9790 if (TREE_CODE (tree010) == INTEGER_CST
9791 && 0 == compare_tree_int (tree010,
9792 element_precision
9793 (TREE_TYPE (TREE_OPERAND
9794 (arg0, 0))))
9795 && operand_equal_p (tree11, tree011, 0))
9796 return fold_convert_loc
9797 (loc, type,
9798 build2 ((code0 != LSHIFT_EXPR
9799 ? LROTATE_EXPR
9800 : RROTATE_EXPR),
9801 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9802 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9803 }
9804 }
9805 }
9806
9807 associate:
9808 /* In most languages, we can't associate operations on floats through
9809 parentheses. Rather than remember where the parentheses were, we
9810 don't associate floats at all, unless the user has specified
9811 -fassociative-math.
9812 And, we need to make sure type is not saturating. */
9813
9814 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9815 && !TYPE_SATURATING (type))
9816 {
9817 tree var0, con0, lit0, minus_lit0;
9818 tree var1, con1, lit1, minus_lit1;
9819 tree atype = type;
9820 bool ok = true;
9821
9822 /* Split both trees into variables, constants, and literals. Then
9823 associate each group together, the constants with literals,
9824 then the result with variables. This increases the chances of
9825 literals being recombined later and of generating relocatable
9826 expressions for the sum of a constant and literal. */
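/* E.g., (X + 1) + 2 splits into the variable X and the literals
   1 and 2; the literals combine first, and the result folds to
   X + 3.  */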
9827 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9828 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9829 code == MINUS_EXPR);
9830
9831 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9832 if (code == MINUS_EXPR)
9833 code = PLUS_EXPR;
9834
9835 /* With undefined overflow prefer doing association in a type
9836 which wraps on overflow, if that is one of the operand types. */
9837 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9838 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9839 {
9840 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9841 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9842 atype = TREE_TYPE (arg0);
9843 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9844 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9845 atype = TREE_TYPE (arg1);
9846 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9847 }
9848
9849 /* With undefined overflow we can only associate constants with one
9850 variable, and constants whose association doesn't overflow. */
9851 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9852 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9853 {
9854 if (var0 && var1)
9855 {
9856 tree tmp0 = var0;
9857 tree tmp1 = var1;
9858
9859 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9860 tmp0 = TREE_OPERAND (tmp0, 0);
9861 if (CONVERT_EXPR_P (tmp0)
9862 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9863 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9864 <= TYPE_PRECISION (atype)))
9865 tmp0 = TREE_OPERAND (tmp0, 0);
9866 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9867 tmp1 = TREE_OPERAND (tmp1, 0);
9868 if (CONVERT_EXPR_P (tmp1)
9869 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9870 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9871 <= TYPE_PRECISION (atype)))
9872 tmp1 = TREE_OPERAND (tmp1, 0);
9873 /* The only case we can still associate with two variables
9874 is if they are the same, modulo negation and bit-pattern
9875 preserving conversions. */
9876 if (!operand_equal_p (tmp0, tmp1, 0))
9877 ok = false;
9878 }
9879 }
9880
9881 /* Only do something if we found more than two objects. Otherwise,
9882 nothing has changed and we risk infinite recursion. */
9883 if (ok
9884 && (2 < ((var0 != 0) + (var1 != 0)
9885 + (con0 != 0) + (con1 != 0)
9886 + (lit0 != 0) + (lit1 != 0)
9887 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9888 {
9889 bool any_overflows = false;
9890 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9891 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9892 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9893 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9894 var0 = associate_trees (loc, var0, var1, code, atype);
9895 con0 = associate_trees (loc, con0, con1, code, atype);
9896 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9897 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9898 code, atype);
9899
9900 /* Preserve the MINUS_EXPR if the negative part of the literal is
9901 greater than the positive part. Otherwise, the multiplicative
9902 folding code (i.e. extract_muldiv) may be fooled when
9903 unsigned constants are subtracted, as in the following
9904 example: ((X*2 + 4) - 8U)/2. */
9905 if (minus_lit0 && lit0)
9906 {
9907 if (TREE_CODE (lit0) == INTEGER_CST
9908 && TREE_CODE (minus_lit0) == INTEGER_CST
9909 && tree_int_cst_lt (lit0, minus_lit0))
9910 {
9911 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9912 MINUS_EXPR, atype);
9913 lit0 = 0;
9914 }
9915 else
9916 {
9917 lit0 = associate_trees (loc, lit0, minus_lit0,
9918 MINUS_EXPR, atype);
9919 minus_lit0 = 0;
9920 }
9921 }
9922
9923 /* Don't introduce overflows through reassociation. */
9924 if (!any_overflows
9925 && ((lit0 && TREE_OVERFLOW_P (lit0))
9926 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9927 return NULL_TREE;
9928
9929 if (minus_lit0)
9930 {
9931 if (con0 == 0)
9932 return
9933 fold_convert_loc (loc, type,
9934 associate_trees (loc, var0, minus_lit0,
9935 MINUS_EXPR, atype));
9936 else
9937 {
9938 con0 = associate_trees (loc, con0, minus_lit0,
9939 MINUS_EXPR, atype);
9940 return
9941 fold_convert_loc (loc, type,
9942 associate_trees (loc, var0, con0,
9943 PLUS_EXPR, atype));
9944 }
9945 }
9946
9947 con0 = associate_trees (loc, con0, lit0, code, atype);
9948 return
9949 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9950 code, atype));
9951 }
9952 }
9953
9954 return NULL_TREE;
9955
9956 case MINUS_EXPR:
9957 /* Pointer simplifications for subtraction, simple reassociations. */
9958 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9959 {
9960 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9961 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9962 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9963 {
9964 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9965 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9966 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
9967 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9968 return fold_build2_loc (loc, PLUS_EXPR, type,
9969 fold_build2_loc (loc, MINUS_EXPR, type,
9970 arg00, arg10),
9971 fold_build2_loc (loc, MINUS_EXPR, type,
9972 arg01, arg11));
9973 }
9974 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9975 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9976 {
9977 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9978 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9979 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
9980 fold_convert_loc (loc, type, arg1));
9981 if (tmp)
9982 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
9983 }
9984 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
9985 simplifies. */
9986 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9987 {
9988 tree arg10 = fold_convert_loc (loc, type,
9989 TREE_OPERAND (arg1, 0));
9990 tree arg11 = fold_convert_loc (loc, type,
9991 TREE_OPERAND (arg1, 1));
9992 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
9993 fold_convert_loc (loc, type, arg0),
9994 arg10);
9995 if (tmp)
9996 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
9997 }
9998 }
9999 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10000 if (TREE_CODE (arg0) == NEGATE_EXPR
10001 && negate_expr_p (arg1)
10002 && reorder_operands_p (arg0, arg1))
10003 return fold_build2_loc (loc, MINUS_EXPR, type,
10004 fold_convert_loc (loc, type,
10005 negate_expr (arg1)),
10006 fold_convert_loc (loc, type,
10007 TREE_OPERAND (arg0, 0)));
10008
10009 if (! FLOAT_TYPE_P (type))
10010 {
10011 /* Fold A - (A & B) into ~B & A. */
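/* E.g., with B = 7, A - (A & 7) equals A & ~7: subtracting the
   low bits leaves only the high bits.  */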
10012 if (!TREE_SIDE_EFFECTS (arg0)
10013 && TREE_CODE (arg1) == BIT_AND_EXPR)
10014 {
10015 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10016 {
10017 tree arg10 = fold_convert_loc (loc, type,
10018 TREE_OPERAND (arg1, 0));
10019 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10020 fold_build1_loc (loc, BIT_NOT_EXPR,
10021 type, arg10),
10022 fold_convert_loc (loc, type, arg0));
10023 }
10024 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10025 {
10026 tree arg11 = fold_convert_loc (loc,
10027 type, TREE_OPERAND (arg1, 1));
10028 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10029 fold_build1_loc (loc, BIT_NOT_EXPR,
10030 type, arg11),
10031 fold_convert_loc (loc, type, arg0));
10032 }
10033 }
10034
10035 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10036 any power of 2 minus 1. */
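/* E.g., with B = 3 and A = 10, (A & ~3) - (A & 3) = 8 - 2 = 6,
   and likewise (A ^ 3) - 3 = 9 - 3 = 6.  */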
10037 if (TREE_CODE (arg0) == BIT_AND_EXPR
10038 && TREE_CODE (arg1) == BIT_AND_EXPR
10039 && operand_equal_p (TREE_OPERAND (arg0, 0),
10040 TREE_OPERAND (arg1, 0), 0))
10041 {
10042 tree mask0 = TREE_OPERAND (arg0, 1);
10043 tree mask1 = TREE_OPERAND (arg1, 1);
10044 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10045
10046 if (operand_equal_p (tem, mask1, 0))
10047 {
10048 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10049 TREE_OPERAND (arg0, 0), mask1);
10050 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10051 }
10052 }
10053 }
10054
10055 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10056 __complex__ ( x, -y ). This is not the same for SNaNs or if
10057 signed zeros are involved. */
10058 if (!HONOR_SNANS (element_mode (arg0))
10059 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10060 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10061 {
10062 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10063 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10064 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10065 bool arg0rz = false, arg0iz = false;
10066 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10067 || (arg0i && (arg0iz = real_zerop (arg0i))))
10068 {
10069 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10070 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10071 if (arg0rz && arg1i && real_zerop (arg1i))
10072 {
10073 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10074 arg1r ? arg1r
10075 : build1 (REALPART_EXPR, rtype, arg1));
10076 tree ip = arg0i ? arg0i
10077 : build1 (IMAGPART_EXPR, rtype, arg0);
10078 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10079 }
10080 else if (arg0iz && arg1r && real_zerop (arg1r))
10081 {
10082 tree rp = arg0r ? arg0r
10083 : build1 (REALPART_EXPR, rtype, arg0);
10084 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10085 arg1i ? arg1i
10086 : build1 (IMAGPART_EXPR, rtype, arg1));
10087 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10088 }
10089 }
10090 }
10091
10092 /* A - B -> A + (-B) if B is easily negatable. */
10093 if (negate_expr_p (arg1)
10094 && !TYPE_OVERFLOW_SANITIZED (type)
10095 && ((FLOAT_TYPE_P (type)
10096 /* Avoid this transformation if B is a positive REAL_CST. */
10097 && (TREE_CODE (arg1) != REAL_CST
10098 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10099 || INTEGRAL_TYPE_P (type)))
10100 return fold_build2_loc (loc, PLUS_EXPR, type,
10101 fold_convert_loc (loc, type, arg0),
10102 fold_convert_loc (loc, type,
10103 negate_expr (arg1)));
10104
10105 /* Fold &a[i] - &a[j] to i-j. */
10106 if (TREE_CODE (arg0) == ADDR_EXPR
10107 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10108 && TREE_CODE (arg1) == ADDR_EXPR
10109 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10110 {
10111 tree tem = fold_addr_of_array_ref_difference (loc, type,
10112 TREE_OPERAND (arg0, 0),
10113 TREE_OPERAND (arg1, 0));
10114 if (tem)
10115 return tem;
10116 }
10117
10118 if (FLOAT_TYPE_P (type)
10119 && flag_unsafe_math_optimizations
10120 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10121 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10122 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10123 return tem;
10124
10125 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10126 one. Make sure the type is not saturating and has the signedness of
10127 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10128 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10129 if ((TREE_CODE (arg0) == MULT_EXPR
10130 || TREE_CODE (arg1) == MULT_EXPR)
10131 && !TYPE_SATURATING (type)
10132 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10133 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10134 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10135 {
10136 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10137 if (tem)
10138 return tem;
10139 }
10140
10141 goto associate;
10142
10143 case MULT_EXPR:
10144 /* (-A) * (-B) -> A * B */
10145 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10146 return fold_build2_loc (loc, MULT_EXPR, type,
10147 fold_convert_loc (loc, type,
10148 TREE_OPERAND (arg0, 0)),
10149 fold_convert_loc (loc, type,
10150 negate_expr (arg1)));
10151 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10152 return fold_build2_loc (loc, MULT_EXPR, type,
10153 fold_convert_loc (loc, type,
10154 negate_expr (arg0)),
10155 fold_convert_loc (loc, type,
10156 TREE_OPERAND (arg1, 0)));
10157
10158 if (! FLOAT_TYPE_P (type))
10159 {
10160 /* Transform x * -C into -x * C if x is easily negatable. */
10161 if (TREE_CODE (arg1) == INTEGER_CST
10162 && tree_int_cst_sgn (arg1) == -1
10163 && negate_expr_p (arg0)
10164 && (tem = negate_expr (arg1)) != arg1
10165 && !TREE_OVERFLOW (tem))
10166 return fold_build2_loc (loc, MULT_EXPR, type,
10167 fold_convert_loc (loc, type,
10168 negate_expr (arg0)),
10169 tem);
10170
10171 /* (a * (1 << b)) is (a << b) */
10172 if (TREE_CODE (arg1) == LSHIFT_EXPR
10173 && integer_onep (TREE_OPERAND (arg1, 0)))
10174 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10175 TREE_OPERAND (arg1, 1));
10176 if (TREE_CODE (arg0) == LSHIFT_EXPR
10177 && integer_onep (TREE_OPERAND (arg0, 0)))
10178 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10179 TREE_OPERAND (arg0, 1));
10180
10181 /* (A + A) * C -> A * 2 * C */
10182 if (TREE_CODE (arg0) == PLUS_EXPR
10183 && TREE_CODE (arg1) == INTEGER_CST
10184 && operand_equal_p (TREE_OPERAND (arg0, 0),
10185 TREE_OPERAND (arg0, 1), 0))
10186 return fold_build2_loc (loc, MULT_EXPR, type,
10187 omit_one_operand_loc (loc, type,
10188 TREE_OPERAND (arg0, 0),
10189 TREE_OPERAND (arg0, 1)),
10190 fold_build2_loc (loc, MULT_EXPR, type,
10191 build_int_cst (type, 2) , arg1));
10192
10193 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10194 sign-changing only. */
10195 if (TREE_CODE (arg1) == INTEGER_CST
10196 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10197 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10198 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10199
10200 strict_overflow_p = false;
10201 if (TREE_CODE (arg1) == INTEGER_CST
10202 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10203 &strict_overflow_p)))
10204 {
10205 if (strict_overflow_p)
10206 fold_overflow_warning (("assuming signed overflow does not "
10207 "occur when simplifying "
10208 "multiplication"),
10209 WARN_STRICT_OVERFLOW_MISC);
10210 return fold_convert_loc (loc, type, tem);
10211 }
10212
10213 /* Optimize z * conj(z) for integer complex numbers. */
10214 if (TREE_CODE (arg0) == CONJ_EXPR
10215 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10216 return fold_mult_zconjz (loc, type, arg1);
10217 if (TREE_CODE (arg1) == CONJ_EXPR
10218 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10219 return fold_mult_zconjz (loc, type, arg0);
10220 }
10221 else
10222 {
10223 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10224 the result for floating point types due to rounding, so it is applied
10225 only if -fassociative-math was specified. */
10226 if (flag_associative_math
10227 && TREE_CODE (arg0) == RDIV_EXPR
10228 && TREE_CODE (arg1) == REAL_CST
10229 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10230 {
10231 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10232 arg1);
10233 if (tem)
10234 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10235 TREE_OPERAND (arg0, 1));
10236 }
10237
10238 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10239 if (operand_equal_p (arg0, arg1, 0))
10240 {
10241 tree tem = fold_strip_sign_ops (arg0);
10242 if (tem != NULL_TREE)
10243 {
10244 tem = fold_convert_loc (loc, type, tem);
10245 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10246 }
10247 }
10248
10249 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10250 This is not the same for NaNs or if signed zeros are
10251 involved. */
10252 if (!HONOR_NANS (arg0)
10253 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10254 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10255 && TREE_CODE (arg1) == COMPLEX_CST
10256 && real_zerop (TREE_REALPART (arg1)))
10257 {
10258 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10259 if (real_onep (TREE_IMAGPART (arg1)))
10260 return
10261 fold_build2_loc (loc, COMPLEX_EXPR, type,
10262 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10263 rtype, arg0)),
10264 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10265 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10266 return
10267 fold_build2_loc (loc, COMPLEX_EXPR, type,
10268 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10269 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10270 rtype, arg0)));
10271 }
10272
10273 /* Optimize z * conj(z) for floating point complex numbers.
10274 Guarded by flag_unsafe_math_optimizations as non-finite
10275 imaginary components don't produce scalar results. */
10276 if (flag_unsafe_math_optimizations
10277 && TREE_CODE (arg0) == CONJ_EXPR
10278 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10279 return fold_mult_zconjz (loc, type, arg1);
10280 if (flag_unsafe_math_optimizations
10281 && TREE_CODE (arg1) == CONJ_EXPR
10282 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10283 return fold_mult_zconjz (loc, type, arg0);
10284
10285 if (flag_unsafe_math_optimizations)
10286 {
10287 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10288 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10289
10290 /* Optimizations of root(...)*root(...). */
10291 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10292 {
10293 tree rootfn, arg;
10294 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10295 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10296
10297 /* Optimize sqrt(x)*sqrt(x) as x. */
10298 if (BUILTIN_SQRT_P (fcode0)
10299 && operand_equal_p (arg00, arg10, 0)
10300 && ! HONOR_SNANS (element_mode (type)))
10301 return arg00;
10302
10303 /* Optimize root(x)*root(y) as root(x*y). */
10304 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10305 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10306 return build_call_expr_loc (loc, rootfn, 1, arg);
10307 }
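/* In source terms, roughly: sqrt (x) * sqrt (y) -> sqrt (x * y).
   This needs the unsafe-math guard: with x = y = -1.0 the left
   side is NaN * NaN while the right side is sqrt (1.0) == 1.0.  */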
10308
10309 /* Optimize expN(x)*expN(y) as expN(x+y). */
10310 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10311 {
10312 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10313 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10314 CALL_EXPR_ARG (arg0, 0),
10315 CALL_EXPR_ARG (arg1, 0));
10316 return build_call_expr_loc (loc, expfn, 1, arg);
10317 }
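/* The usual exponent law.  A sketch of why it is "unsafe":
   exp (800.) * exp (-800.) evaluates as inf * 0 == NaN in double,
   while the folded exp (0.) is exactly 1.0.  */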
10318
10319 /* Optimizations of pow(...)*pow(...). */
10320 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10321 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10322 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10323 {
10324 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10325 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10326 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10327 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10328
10329 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10330 if (operand_equal_p (arg01, arg11, 0))
10331 {
10332 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10333 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10334 arg00, arg10);
10335 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10336 }
10337
10338 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10339 if (operand_equal_p (arg00, arg10, 0))
10340 {
10341 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10342 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10343 arg01, arg11);
10344 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10345 }
10346 }
10347
10348 /* Optimize tan(x)*cos(x) as sin(x). */
10349 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10350 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10351 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10352 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10353 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10354 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10355 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10356 CALL_EXPR_ARG (arg1, 0), 0))
10357 {
10358 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10359
10360 if (sinfn != NULL_TREE)
10361 return build_call_expr_loc (loc, sinfn, 1,
10362 CALL_EXPR_ARG (arg0, 0));
10363 }
10364
10365 /* Optimize x*pow(x,c) as pow(x,c+1). */
10366 if (fcode1 == BUILT_IN_POW
10367 || fcode1 == BUILT_IN_POWF
10368 || fcode1 == BUILT_IN_POWL)
10369 {
10370 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10371 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10372 if (TREE_CODE (arg11) == REAL_CST
10373 && !TREE_OVERFLOW (arg11)
10374 && operand_equal_p (arg0, arg10, 0))
10375 {
10376 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10377 REAL_VALUE_TYPE c;
10378 tree arg;
10379
10380 c = TREE_REAL_CST (arg11);
10381 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10382 arg = build_real (type, c);
10383 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10384 }
10385 }
10386
10387 /* Optimize pow(x,c)*x as pow(x,c+1). */
10388 if (fcode0 == BUILT_IN_POW
10389 || fcode0 == BUILT_IN_POWF
10390 || fcode0 == BUILT_IN_POWL)
10391 {
10392 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10393 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10394 if (TREE_CODE (arg01) == REAL_CST
10395 && !TREE_OVERFLOW (arg01)
10396 && operand_equal_p (arg1, arg00, 0))
10397 {
10398 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10399 REAL_VALUE_TYPE c;
10400 tree arg;
10401
10402 c = TREE_REAL_CST (arg01);
10403 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10404 arg = build_real (type, c);
10405 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10406 }
10407 }
10408
10409 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10410 if (!in_gimple_form
10411 && optimize
10412 && operand_equal_p (arg0, arg1, 0))
10413 {
10414 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10415
10416 if (powfn)
10417 {
10418 tree arg = build_real (type, dconst2);
10419 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10420 }
10421 }
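/* Roughly: writing x*x as pow (x, 2.0) lets it combine with
   neighbouring pow calls via the rules above (e.g. x*x*pow (x, c)
   can end up as pow (x, c + 2.0)), and since the expander turns
   pow (x, 2.0) back into x*x, nothing is lost when no combination
   happens.  */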
10422 }
10423 }
10424 goto associate;
10425
10426 case BIT_IOR_EXPR:
10427 bit_ior:
10428 /* Canonicalize (X & C1) | C2. */
10429 if (TREE_CODE (arg0) == BIT_AND_EXPR
10430 && TREE_CODE (arg1) == INTEGER_CST
10431 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10432 {
10433 int width = TYPE_PRECISION (type), w;
10434 wide_int c1 = TREE_OPERAND (arg0, 1);
10435 wide_int c2 = arg1;
10436
10437 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10438 if ((c1 & c2) == c1)
10439 return omit_one_operand_loc (loc, type, arg1,
10440 TREE_OPERAND (arg0, 0));
10441
10442 wide_int msk = wi::mask (width, false,
10443 TYPE_PRECISION (TREE_TYPE (arg1)));
10444
10445 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10446 if (msk.and_not (c1 | c2) == 0)
10447 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10448 TREE_OPERAND (arg0, 0), arg1);
10449
10450 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10451 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10452 mode which allows further optimizations. */
10453 c1 &= msk;
10454 c2 &= msk;
10455 wide_int c3 = c1.and_not (c2);
10456 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10457 {
10458 wide_int mask = wi::mask (w, false,
10459 TYPE_PRECISION (type));
10460 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10461 {
10462 c3 = mask;
10463 break;
10464 }
10465 }
10466
10467 if (c3 != c1)
10468 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10469 fold_build2_loc (loc, BIT_AND_EXPR, type,
10470 TREE_OPERAND (arg0, 0),
10471 wide_int_to_tree (type,
10472 c3)),
10473 arg1);
10474 }
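/* A worked example of the minimization above, with 8-bit values:
   (X & 0x0f) | 0x05 becomes (X & 0x0a) | 0x05 since
   C3 = C1 & ~C2 = 0x0a; whereas (X & 0xf0) | 0x0f has
   C1 | C2 == 0xff, so the earlier test folds it to X | 0x0f
   instead.  */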
10475
10476 /* (X & ~Y) | (~X & Y) is X ^ Y */
10477 if (TREE_CODE (arg0) == BIT_AND_EXPR
10478 && TREE_CODE (arg1) == BIT_AND_EXPR)
10479 {
10480 tree a0, a1, l0, l1, n0, n1;
10481
10482 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10483 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10484
10485 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10486 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10487
10488 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10489 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10490
10491 if ((operand_equal_p (n0, a0, 0)
10492 && operand_equal_p (n1, a1, 0))
10493 || (operand_equal_p (n0, a1, 0)
10494 && operand_equal_p (n1, a0, 0)))
10495 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10496 }
10497
10498 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10499 if (t1 != NULL_TREE)
10500 return t1;
10501
10502 /* See if this can be simplified into a rotate first. If that
10503 is unsuccessful continue in the association code. */
10504 goto bit_rotate;
10505
10506 case BIT_XOR_EXPR:
10507 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10508 if (TREE_CODE (arg0) == BIT_AND_EXPR
10509 && INTEGRAL_TYPE_P (type)
10510 && integer_onep (TREE_OPERAND (arg0, 1))
10511 && integer_onep (arg1))
10512 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10513 build_zero_cst (TREE_TYPE (arg0)));
10514
10515 /* See if this can be simplified into a rotate first. If that
10516 is unsuccessful continue in the association code. */
10517 goto bit_rotate;
10518
10519 case BIT_AND_EXPR:
10520 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
10521 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
10522 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
10523 || (TREE_CODE (arg0) == EQ_EXPR
10524 && integer_zerop (TREE_OPERAND (arg0, 1))))
10525 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10526 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10527
10528 /* X & ~X , X & (X == 0), and X & !X are always zero. */
10529 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
10530 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
10531 || (TREE_CODE (arg1) == EQ_EXPR
10532 && integer_zerop (TREE_OPERAND (arg1, 1))))
10533 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10534 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10535
10536 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10537 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10538 && INTEGRAL_TYPE_P (type)
10539 && integer_onep (TREE_OPERAND (arg0, 1))
10540 && integer_onep (arg1))
10541 {
10542 tree tem2;
10543 tem = TREE_OPERAND (arg0, 0);
10544 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10545 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10546 tem, tem2);
10547 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10548 build_zero_cst (TREE_TYPE (tem)));
10549 }
10550 /* Fold ~X & 1 as (X & 1) == 0. */
10551 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10552 && INTEGRAL_TYPE_P (type)
10553 && integer_onep (arg1))
10554 {
10555 tree tem2;
10556 tem = TREE_OPERAND (arg0, 0);
10557 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10558 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10559 tem, tem2);
10560 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10561 build_zero_cst (TREE_TYPE (tem)));
10562 }
10563 /* Fold !X & 1 as X == 0. */
10564 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10565 && integer_onep (arg1))
10566 {
10567 tem = TREE_OPERAND (arg0, 0);
10568 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10569 build_zero_cst (TREE_TYPE (tem)));
10570 }
10571
10572 /* Fold (X ^ Y) & Y as ~X & Y. */
10573 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10574 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10575 {
10576 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10577 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10578 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10579 fold_convert_loc (loc, type, arg1));
10580 }
10581 /* Fold (X ^ Y) & X as ~Y & X. */
10582 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10583 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10584 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10585 {
10586 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10587 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10588 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10589 fold_convert_loc (loc, type, arg1));
10590 }
10591 /* Fold X & (X ^ Y) as X & ~Y. */
10592 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10593 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10594 {
10595 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10596 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10597 fold_convert_loc (loc, type, arg0),
10598 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10599 }
10600 /* Fold X & (Y ^ X) as ~Y & X. */
10601 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10602 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10603 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10604 {
10605 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10606 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10607 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10608 fold_convert_loc (loc, type, arg0));
10609 }
10610
10611 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10612 multiple of 1 << CST. */
10613 if (TREE_CODE (arg1) == INTEGER_CST)
10614 {
10615 wide_int cst1 = arg1;
10616 wide_int ncst1 = -cst1;
10617 if ((cst1 & ncst1) == ncst1
10618 && multiple_of_p (type, arg0,
10619 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10620 return fold_convert_loc (loc, type, arg0);
10621 }
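/* Sketch with CST == 4, i.e. arg1 == -(1 << 4) == ~0xf: if X * Y is
   known to be a multiple of 16 (say Y is a constant multiple of 16),
   its low four bits are already clear and the mask is a no-op.  The
   (cst1 & ncst1) == ncst1 test verifies that arg1 really has the
   -(1 << CST) shape.  */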
10622
10623 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10624 bits from CST2. */
10625 if (TREE_CODE (arg1) == INTEGER_CST
10626 && TREE_CODE (arg0) == MULT_EXPR
10627 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10628 {
10629 wide_int warg1 = arg1;
10630 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10631
10632 if (masked == 0)
10633 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10634 arg0, arg1);
10635 else if (masked != warg1)
10636 {
10637 /* Avoid the transform if arg1 is a mask of some
10638 mode which allows further optimizations. */
10639 int pop = wi::popcount (warg1);
10640 if (!(pop >= BITS_PER_UNIT
10641 && exact_log2 (pop) != -1
10642 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10643 return fold_build2_loc (loc, code, type, op0,
10644 wide_int_to_tree (type, masked));
10645 }
10646 }
10647
10648 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10649 ((A & N) + B) & M -> (A + B) & M
10650 Similarly if (N & M) == 0,
10651 ((A | N) + B) & M -> (A + B) & M
10652 and for - instead of + (or unary - instead of +)
10653 and/or ^ instead of |.
10654 If B is constant and (B & M) == 0, fold into A & M. */
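/* For instance (a sketch), with M == 0xff:
     ((A & 0xffff) + B) & 0xff -> (A + B) & 0xff
     ((A | 0xf00) + B) & 0xff  -> (A + B) & 0xff
   since under wrapping arithmetic carries only propagate upwards,
   so bits above the mask cannot affect bits below it.  */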
10655 if (TREE_CODE (arg1) == INTEGER_CST)
10656 {
10657 wide_int cst1 = arg1;
10658 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10659 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10660 && (TREE_CODE (arg0) == PLUS_EXPR
10661 || TREE_CODE (arg0) == MINUS_EXPR
10662 || TREE_CODE (arg0) == NEGATE_EXPR)
10663 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10664 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10665 {
10666 tree pmop[2];
10667 int which = 0;
10668 wide_int cst0;
10669
10670 /* Now we know that arg0 is (C + D) or (C - D) or
10671 -C and arg1 (M) is == (1LL << cst) - 1.
10672 Store C into PMOP[0] and D into PMOP[1]. */
10673 pmop[0] = TREE_OPERAND (arg0, 0);
10674 pmop[1] = NULL;
10675 if (TREE_CODE (arg0) != NEGATE_EXPR)
10676 {
10677 pmop[1] = TREE_OPERAND (arg0, 1);
10678 which = 1;
10679 }
10680
10681 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10682 which = -1;
10683
10684 for (; which >= 0; which--)
10685 switch (TREE_CODE (pmop[which]))
10686 {
10687 case BIT_AND_EXPR:
10688 case BIT_IOR_EXPR:
10689 case BIT_XOR_EXPR:
10690 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10691 != INTEGER_CST)
10692 break;
10693 cst0 = TREE_OPERAND (pmop[which], 1);
10694 cst0 &= cst1;
10695 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10696 {
10697 if (cst0 != cst1)
10698 break;
10699 }
10700 else if (cst0 != 0)
10701 break;
10702 /* If C or D is of the form (A & N) where
10703 (N & M) == M, or of the form (A | N) or
10704 (A ^ N) where (N & M) == 0, replace it with A. */
10705 pmop[which] = TREE_OPERAND (pmop[which], 0);
10706 break;
10707 case INTEGER_CST:
10708 /* If C or D is a N where (N & M) == 0, it can be
10709 omitted (assumed 0). */
10710 if ((TREE_CODE (arg0) == PLUS_EXPR
10711 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10712 && (cst1 & pmop[which]) == 0)
10713 pmop[which] = NULL;
10714 break;
10715 default:
10716 break;
10717 }
10718
10719 /* Only build anything new if we optimized one or both arguments
10720 above. */
10721 if (pmop[0] != TREE_OPERAND (arg0, 0)
10722 || (TREE_CODE (arg0) != NEGATE_EXPR
10723 && pmop[1] != TREE_OPERAND (arg0, 1)))
10724 {
10725 tree utype = TREE_TYPE (arg0);
10726 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10727 {
10728 /* Perform the operations in a type that has defined
10729 overflow behavior. */
10730 utype = unsigned_type_for (TREE_TYPE (arg0));
10731 if (pmop[0] != NULL)
10732 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10733 if (pmop[1] != NULL)
10734 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10735 }
10736
10737 if (TREE_CODE (arg0) == NEGATE_EXPR)
10738 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10739 else if (TREE_CODE (arg0) == PLUS_EXPR)
10740 {
10741 if (pmop[0] != NULL && pmop[1] != NULL)
10742 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10743 pmop[0], pmop[1]);
10744 else if (pmop[0] != NULL)
10745 tem = pmop[0];
10746 else if (pmop[1] != NULL)
10747 tem = pmop[1];
10748 else
10749 return build_int_cst (type, 0);
10750 }
10751 else if (pmop[0] == NULL)
10752 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10753 else
10754 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10755 pmop[0], pmop[1]);
10756 /* TEM is now the new binary +, - or unary - replacement. */
10757 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10758 fold_convert_loc (loc, utype, arg1));
10759 return fold_convert_loc (loc, type, tem);
10760 }
10761 }
10762 }
10763
10764 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10765 if (t1 != NULL_TREE)
10766 return t1;
10767 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10768 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10769 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10770 {
10771 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10772
10773 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10774 if (mask == -1)
10775 return
10776 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10777 }
10778
10779 goto associate;
10780
10781 case RDIV_EXPR:
10782 /* Don't touch a floating-point divide by zero unless the mode
10783 of the constant can represent infinity. */
10784 if (TREE_CODE (arg1) == REAL_CST
10785 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10786 && real_zerop (arg1))
10787 return NULL_TREE;
10788
10789 /* (-A) / (-B) -> A / B */
10790 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10791 return fold_build2_loc (loc, RDIV_EXPR, type,
10792 TREE_OPERAND (arg0, 0),
10793 negate_expr (arg1));
10794 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10795 return fold_build2_loc (loc, RDIV_EXPR, type,
10796 negate_expr (arg0),
10797 TREE_OPERAND (arg1, 0));
10798
10799 /* Convert A/B/C to A/(B*C). */
10800 if (flag_reciprocal_math
10801 && TREE_CODE (arg0) == RDIV_EXPR)
10802 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10803 fold_build2_loc (loc, MULT_EXPR, type,
10804 TREE_OPERAND (arg0, 1), arg1));
10805
10806 /* Convert A/(B/C) to (A/B)*C. */
10807 if (flag_reciprocal_math
10808 && TREE_CODE (arg1) == RDIV_EXPR)
10809 return fold_build2_loc (loc, MULT_EXPR, type,
10810 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
10811 TREE_OPERAND (arg1, 0)),
10812 TREE_OPERAND (arg1, 1));
10813
10814 /* Convert C1/(X*C2) into (C1/C2)/X. */
10815 if (flag_reciprocal_math
10816 && TREE_CODE (arg1) == MULT_EXPR
10817 && TREE_CODE (arg0) == REAL_CST
10818 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10819 {
10820 tree tem = const_binop (RDIV_EXPR, arg0,
10821 TREE_OPERAND (arg1, 1));
10822 if (tem)
10823 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10824 TREE_OPERAND (arg1, 0));
10825 }
10826
10827 if (flag_unsafe_math_optimizations)
10828 {
10829 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10830 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10831
10832 /* Optimize sin(x)/cos(x) as tan(x). */
10833 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10834 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10835 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10836 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10837 CALL_EXPR_ARG (arg1, 0), 0))
10838 {
10839 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10840
10841 if (tanfn != NULL_TREE)
10842 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10843 }
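/* In source terms, roughly:
     double f (double x) { return sin (x) / cos (x); }
   becomes a single call to tan (x).  Near the poles of tan the two
   forms can differ, hence the unsafe-math guard.  */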
10844
10845 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10846 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10847 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10848 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10849 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10850 CALL_EXPR_ARG (arg1, 0), 0))
10851 {
10852 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10853
10854 if (tanfn != NULL_TREE)
10855 {
10856 tree tmp = build_call_expr_loc (loc, tanfn, 1,
10857 CALL_EXPR_ARG (arg0, 0));
10858 return fold_build2_loc (loc, RDIV_EXPR, type,
10859 build_real (type, dconst1), tmp);
10860 }
10861 }
10862
10863 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10864 NaNs or Infinities. */
10865 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10866 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10867 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10868 {
10869 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10870 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10871
10872 if (! HONOR_NANS (arg00)
10873 && ! HONOR_INFINITIES (element_mode (arg00))
10874 && operand_equal_p (arg00, arg01, 0))
10875 {
10876 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10877
10878 if (cosfn != NULL_TREE)
10879 return build_call_expr_loc (loc, cosfn, 1, arg00);
10880 }
10881 }
10882
10883 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10884 NaNs or Infinities. */
10885 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10886 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10887 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10888 {
10889 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10890 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10891
10892 if (! HONOR_NANS (arg00)
10893 && ! HONOR_INFINITIES (element_mode (arg00))
10894 && operand_equal_p (arg00, arg01, 0))
10895 {
10896 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10897
10898 if (cosfn != NULL_TREE)
10899 {
10900 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
10901 return fold_build2_loc (loc, RDIV_EXPR, type,
10902 build_real (type, dconst1),
10903 tmp);
10904 }
10905 }
10906 }
10907
10908 /* Optimize pow(x,c)/x as pow(x,c-1). */
10909 if (fcode0 == BUILT_IN_POW
10910 || fcode0 == BUILT_IN_POWF
10911 || fcode0 == BUILT_IN_POWL)
10912 {
10913 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10914 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10915 if (TREE_CODE (arg01) == REAL_CST
10916 && !TREE_OVERFLOW (arg01)
10917 && operand_equal_p (arg1, arg00, 0))
10918 {
10919 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10920 REAL_VALUE_TYPE c;
10921 tree arg;
10922
10923 c = TREE_REAL_CST (arg01);
10924 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10925 arg = build_real (type, c);
10926 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10927 }
10928 }
10929
10930 /* Optimize a/root(b/c) into a*root(c/b). */
10931 if (BUILTIN_ROOT_P (fcode1))
10932 {
10933 tree rootarg = CALL_EXPR_ARG (arg1, 0);
10934
10935 if (TREE_CODE (rootarg) == RDIV_EXPR)
10936 {
10937 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10938 tree b = TREE_OPERAND (rootarg, 0);
10939 tree c = TREE_OPERAND (rootarg, 1);
10940
10941 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
10942
10943 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
10944 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
10945 }
10946 }
10947
10948 /* Optimize x/expN(y) into x*expN(-y). */
10949 if (BUILTIN_EXPONENT_P (fcode1))
10950 {
10951 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10952 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10953 arg1 = build_call_expr_loc (loc,
10954 expfn, 1,
10955 fold_convert_loc (loc, type, arg));
10956 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
10957 }
10958
10959 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10960 if (fcode1 == BUILT_IN_POW
10961 || fcode1 == BUILT_IN_POWF
10962 || fcode1 == BUILT_IN_POWL)
10963 {
10964 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10965 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10966 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10967 tree neg11 = fold_convert_loc (loc, type,
10968 negate_expr (arg11));
10969 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
10970 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
10971 }
10972 }
10973 return NULL_TREE;
10974
10975 case TRUNC_DIV_EXPR:
10976 /* Optimize (X & (-A)) / A where A is a power of 2,
10977 to X >> log2(A).  */
10978 if (TREE_CODE (arg0) == BIT_AND_EXPR
10979 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
10980 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
10981 {
10982 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
10983 arg1, TREE_OPERAND (arg0, 1));
10984 if (sum && integer_zerop (sum))
{
10985 tree pow2 = build_int_cst (integer_type_node,
10986 wi::exact_log2 (arg1));
10987 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10988 TREE_OPERAND (arg0, 0), pow2);
10989 }
10990 }
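/* Sketch with A == 8: (X & -8) / 8 -> X >> 3.  The sum test checks
   that the mask is exactly -A, so the dividend is 8*floor(X/8), the
   quotient is floor(X/8), and that is precisely what the arithmetic
   shift computes, including for negative X.  */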
10991
10992 /* Fall through */
10993
10994 case FLOOR_DIV_EXPR:
10995 /* Simplify A / (B << N) where A and B are positive and B is
10996 a power of 2, to A >> (N + log2(B)). */
10997 strict_overflow_p = false;
10998 if (TREE_CODE (arg1) == LSHIFT_EXPR
10999 && (TYPE_UNSIGNED (type)
11000 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11001 {
11002 tree sval = TREE_OPERAND (arg1, 0);
11003 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11004 {
11005 tree sh_cnt = TREE_OPERAND (arg1, 1);
11006 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11007 wi::exact_log2 (sval));
11008
11009 if (strict_overflow_p)
11010 fold_overflow_warning (("assuming signed overflow does not "
11011 "occur when simplifying A / (B << N)"),
11012 WARN_STRICT_OVERFLOW_MISC);
11013
11014 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11015 sh_cnt, pow2);
11016 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11017 fold_convert_loc (loc, type, arg0), sh_cnt);
11018 }
11019 }
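/* E.g. with B == 4: A / (4 << N) -> A >> (N + 2).  A must be known
   nonnegative because, for TRUNC_DIV_EXPR at least, a negative A
   truncates toward zero under division but is floored by the
   arithmetic shift.  */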
11020
11021 /* Fall through */
11022
11023 case ROUND_DIV_EXPR:
11024 case CEIL_DIV_EXPR:
11025 case EXACT_DIV_EXPR:
11026 if (integer_zerop (arg1))
11027 return NULL_TREE;
11028
11029 /* Convert -A / -B to A / B when the type is signed and overflow is
11030 undefined. */
11031 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11032 && TREE_CODE (arg0) == NEGATE_EXPR
11033 && negate_expr_p (arg1))
11034 {
11035 if (INTEGRAL_TYPE_P (type))
11036 fold_overflow_warning (("assuming signed overflow does not occur "
11037 "when distributing negation across "
11038 "division"),
11039 WARN_STRICT_OVERFLOW_MISC);
11040 return fold_build2_loc (loc, code, type,
11041 fold_convert_loc (loc, type,
11042 TREE_OPERAND (arg0, 0)),
11043 fold_convert_loc (loc, type,
11044 negate_expr (arg1)));
11045 }
11046 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11047 && TREE_CODE (arg1) == NEGATE_EXPR
11048 && negate_expr_p (arg0))
11049 {
11050 if (INTEGRAL_TYPE_P (type))
11051 fold_overflow_warning (("assuming signed overflow does not occur "
11052 "when distributing negation across "
11053 "division"),
11054 WARN_STRICT_OVERFLOW_MISC);
11055 return fold_build2_loc (loc, code, type,
11056 fold_convert_loc (loc, type,
11057 negate_expr (arg0)),
11058 fold_convert_loc (loc, type,
11059 TREE_OPERAND (arg1, 0)));
11060 }
11061
11062 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11063 operation, EXACT_DIV_EXPR.
11064
11065 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11066 At one time others generated faster code, but it's not clear whether they
11067 still do after the last round of changes to the DIV code in expmed.c. */
11068 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11069 && multiple_of_p (type, arg0, arg1))
11070 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11071
11072 strict_overflow_p = false;
11073 if (TREE_CODE (arg1) == INTEGER_CST
11074 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11075 &strict_overflow_p)))
11076 {
11077 if (strict_overflow_p)
11078 fold_overflow_warning (("assuming signed overflow does not occur "
11079 "when simplifying division"),
11080 WARN_STRICT_OVERFLOW_MISC);
11081 return fold_convert_loc (loc, type, tem);
11082 }
11083
11084 return NULL_TREE;
11085
11086 case CEIL_MOD_EXPR:
11087 case FLOOR_MOD_EXPR:
11088 case ROUND_MOD_EXPR:
11089 case TRUNC_MOD_EXPR:
11090 strict_overflow_p = false;
11091 if (TREE_CODE (arg1) == INTEGER_CST
11092 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11093 &strict_overflow_p)))
11094 {
11095 if (strict_overflow_p)
11096 fold_overflow_warning (("assuming signed overflow does not occur "
11097 "when simplifying modulus"),
11098 WARN_STRICT_OVERFLOW_MISC);
11099 return fold_convert_loc (loc, type, tem);
11100 }
11101
11102 return NULL_TREE;
11103
11104 case LROTATE_EXPR:
11105 case RROTATE_EXPR:
11106 case RSHIFT_EXPR:
11107 case LSHIFT_EXPR:
11108 /* Since negative shift count is not well-defined,
11109 don't try to compute it in the compiler. */
11110 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11111 return NULL_TREE;
11112
11113 prec = element_precision (type);
11114
11115 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11116 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11117 && tree_to_uhwi (arg1) < prec
11118 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11119 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11120 {
11121 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11122 + tree_to_uhwi (arg1));
11123
11124 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11125 being well defined. */
11126 if (low >= prec)
11127 {
11128 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11129 low = low % prec;
11130 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11131 return omit_one_operand_loc (loc, type, build_zero_cst (type),
11132 TREE_OPERAND (arg0, 0));
11133 else
11134 low = prec - 1;
11135 }
11136
11137 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11138 build_int_cst (TREE_TYPE (arg1), low));
11139 }
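/* E.g. (a << 3) << 5 -> a << 8, and rotate counts combine modulo
   the precision.  A sketch of the out-of-range case for 32-bit a:
   (a << 20) << 20 is well defined and yields 0, which is what the
   low >= prec handling above produces, whereas a << 40 would be
   undefined.  */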
11140
11141 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11142 into x & ((unsigned)-1 >> c) for unsigned types. */
11143 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11144 || (TYPE_UNSIGNED (type)
11145 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11146 && tree_fits_uhwi_p (arg1)
11147 && tree_to_uhwi (arg1) < prec
11148 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11149 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11150 {
11151 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11152 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
11153 tree lshift;
11154 tree arg00;
11155
11156 if (low0 == low1)
11157 {
11158 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11159
11160 lshift = build_minus_one_cst (type);
11161 lshift = const_binop (code, lshift, arg1);
11162
11163 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11164 }
11165 }
11166
11167 /* If we have a rotate of a bit operation with the rotate count and
11168 the second operand of the bit operation both constant,
11169 permute the two operations. */
11170 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11171 && (TREE_CODE (arg0) == BIT_AND_EXPR
11172 || TREE_CODE (arg0) == BIT_IOR_EXPR
11173 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11174 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11175 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11176 fold_build2_loc (loc, code, type,
11177 TREE_OPERAND (arg0, 0), arg1),
11178 fold_build2_loc (loc, code, type,
11179 TREE_OPERAND (arg0, 1), arg1));
11180
11181 /* Two consecutive rotates adding up to some integer
11182 multiple of the precision of the type can be ignored. */
11183 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11184 && TREE_CODE (arg0) == RROTATE_EXPR
11185 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11186 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
11187 prec) == 0)
11188 return TREE_OPERAND (arg0, 0);
11189
11190 return NULL_TREE;
11191
11192 case MIN_EXPR:
11193 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11194 if (tem)
11195 return tem;
11196 goto associate;
11197
11198 case MAX_EXPR:
11199 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11200 if (tem)
11201 return tem;
11202 goto associate;
11203
11204 case TRUTH_ANDIF_EXPR:
11205 /* Note that the operands of this must be ints
11206 and their values must be 0 or 1.
11207 ("true" is a fixed value perhaps depending on the language.) */
11208 /* If first arg is constant zero, return it. */
11209 if (integer_zerop (arg0))
11210 return fold_convert_loc (loc, type, arg0);
11211 case TRUTH_AND_EXPR:
11212 /* If either arg is constant true, drop it. */
11213 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11214 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11215 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11216 /* Preserve sequence points. */
11217 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11218 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11219 /* If second arg is constant zero, result is zero, but first arg
11220 must be evaluated. */
11221 if (integer_zerop (arg1))
11222 return omit_one_operand_loc (loc, type, arg1, arg0);
11223 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11224 case will be handled here. */
11225 if (integer_zerop (arg0))
11226 return omit_one_operand_loc (loc, type, arg0, arg1);
11227
11228 /* !X && X is always false. */
11229 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11230 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11231 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11232 /* X && !X is always false. */
11233 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11234 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11235 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11236
11237 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11238 means A >= Y && A != MAX, but in this case we know that
11239 A < X <= MAX. */
11240
11241 if (!TREE_SIDE_EFFECTS (arg0)
11242 && !TREE_SIDE_EFFECTS (arg1))
11243 {
11244 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11245 if (tem && !operand_equal_p (tem, arg0, 0))
11246 return fold_build2_loc (loc, code, type, tem, arg1);
11247
11248 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11249 if (tem && !operand_equal_p (tem, arg1, 0))
11250 return fold_build2_loc (loc, code, type, arg0, tem);
11251 }
11252
11253 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11254 != NULL_TREE)
11255 return tem;
11256
11257 return NULL_TREE;
11258
11259 case TRUTH_ORIF_EXPR:
11260 /* Note that the operands of this must be ints
11261 and their values must be 0 or 1.
11262 ("true" is a fixed value perhaps depending on the language.) */
11263 /* If first arg is constant true, return it. */
11264 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11265 return fold_convert_loc (loc, type, arg0);
11266 case TRUTH_OR_EXPR:
11267 /* If either arg is constant zero, drop it. */
11268 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11269 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11270 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11271 /* Preserve sequence points. */
11272 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11273 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11274 /* If second arg is constant true, result is true, but we must
11275 evaluate first arg. */
11276 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11277 return omit_one_operand_loc (loc, type, arg1, arg0);
11278 /* Likewise for first arg, but note this only occurs here for
11279 TRUTH_OR_EXPR. */
11280 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11281 return omit_one_operand_loc (loc, type, arg0, arg1);
11282
11283 /* !X || X is always true. */
11284 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11285 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11286 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11287 /* X || !X is always true. */
11288 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11289 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11290 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11291
11292 /* (X && !Y) || (!X && Y) is X ^ Y */
11293 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11294 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11295 {
11296 tree a0, a1, l0, l1, n0, n1;
11297
11298 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11299 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11300
11301 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11302 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11303
11304 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11305 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11306
11307 if ((operand_equal_p (n0, a0, 0)
11308 && operand_equal_p (n1, a1, 0))
11309 || (operand_equal_p (n0, a1, 0)
11310 && operand_equal_p (n1, a0, 0)))
11311 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11312 }
11313
11314 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11315 != NULL_TREE)
11316 return tem;
11317
11318 return NULL_TREE;
11319
11320 case TRUTH_XOR_EXPR:
11321 /* If the second arg is constant zero, drop it. */
11322 if (integer_zerop (arg1))
11323 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11324 /* If the second arg is constant true, this is a logical inversion. */
11325 if (integer_onep (arg1))
11326 {
11327 tem = invert_truthvalue_loc (loc, arg0);
11328 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11329 }
11330 /* Identical arguments cancel to zero. */
11331 if (operand_equal_p (arg0, arg1, 0))
11332 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11333
11334 /* !X ^ X is always true. */
11335 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11336 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11337 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11338
11339 /* X ^ !X is always true. */
11340 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11341 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11342 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11343
11344 return NULL_TREE;
11345
11346 case EQ_EXPR:
11347 case NE_EXPR:
11348 STRIP_NOPS (arg0);
11349 STRIP_NOPS (arg1);
11350
11351 tem = fold_comparison (loc, code, type, op0, op1);
11352 if (tem != NULL_TREE)
11353 return tem;
11354
11355 /* bool_var != 0 becomes bool_var. */
11356 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11357 && code == NE_EXPR)
11358 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11359
11360 /* bool_var == 1 becomes bool_var. */
11361 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11362 && code == EQ_EXPR)
11363 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11364
11365 /* bool_var != 1 becomes !bool_var. */
11366 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11367 && code == NE_EXPR)
11368 return fold_convert_loc (loc, type,
11369 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11370 TREE_TYPE (arg0), arg0));
11371
11372 /* bool_var == 0 becomes !bool_var. */
11373 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11374 && code == EQ_EXPR)
11375 return fold_convert_loc (loc, type,
11376 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11377 TREE_TYPE (arg0), arg0));
11378
11379 /* !exp != 0 becomes !exp */
11380 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11381 && code == NE_EXPR)
11382 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11383
11384 /* If this is an equality comparison of the address of two non-weak,
11385 unaliased symbols neither of which are extern (since we do not
11386 have access to attributes for externs), then we know the result. */
11387 if (TREE_CODE (arg0) == ADDR_EXPR
11388 && DECL_P (TREE_OPERAND (arg0, 0))
11389 && TREE_CODE (arg1) == ADDR_EXPR
11390 && DECL_P (TREE_OPERAND (arg1, 0)))
11391 {
11392 int equal;
11393
11394 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
11395 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
11396 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
11397 ->equal_address_to (symtab_node::get_create
11398 (TREE_OPERAND (arg1, 0)));
11399 else
11400 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11401 if (equal != 2)
11402 return constant_boolean_node (equal
11403 ? code == EQ_EXPR : code != EQ_EXPR,
11404 type);
11405 }
11406
11407 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11408 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11409 && TREE_CODE (arg1) == INTEGER_CST
11410 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11411 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11412 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
11413 fold_convert_loc (loc,
11414 TREE_TYPE (arg0),
11415 arg1),
11416 TREE_OPERAND (arg0, 1)));
11417
11418 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
11419 if ((TREE_CODE (arg0) == PLUS_EXPR
11420 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
11421 || TREE_CODE (arg0) == MINUS_EXPR)
11422 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
11423 0)),
11424 arg1, 0)
11425 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11426 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11427 {
11428 tree val = TREE_OPERAND (arg0, 1);
11429 return omit_two_operands_loc (loc, type,
11430 fold_build2_loc (loc, code, type,
11431 val,
11432 build_int_cst (TREE_TYPE (val),
11433 0)),
11434 TREE_OPERAND (arg0, 0), arg1);
11435 }
11436
11437 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
11438 if (TREE_CODE (arg0) == MINUS_EXPR
11439 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
11440 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
11441 1)),
11442 arg1, 0)
11443 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
11444 {
11445 return omit_two_operands_loc (loc, type,
11446 code == NE_EXPR
11447 ? boolean_true_node : boolean_false_node,
11448 TREE_OPERAND (arg0, 1), arg1);
11449 }
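/* A sketch of the parity argument: C - X == X would require
   2*X == C, and 2*X is even (also modulo 2^prec) while C is odd,
   so the equality is always false and the inequality always
   true.  */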
11450
11451 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11452 if (TREE_CODE (arg0) == ABS_EXPR
11453 && (integer_zerop (arg1) || real_zerop (arg1)))
11454 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
11455
11456 /* If this is an EQ or NE comparison with zero and ARG0 is
11457 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11458 two operations, but the latter can be done in one less insn
11459 on machines that have only two-operand insns or on which a
11460 constant cannot be the first operand. */
11461 if (TREE_CODE (arg0) == BIT_AND_EXPR
11462 && integer_zerop (arg1))
11463 {
11464 tree arg00 = TREE_OPERAND (arg0, 0);
11465 tree arg01 = TREE_OPERAND (arg0, 1);
11466 if (TREE_CODE (arg00) == LSHIFT_EXPR
11467 && integer_onep (TREE_OPERAND (arg00, 0)))
11468 {
11469 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11470 arg01, TREE_OPERAND (arg00, 1));
11471 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11472 build_int_cst (TREE_TYPE (arg0), 1));
11473 return fold_build2_loc (loc, code, type,
11474 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11475 arg1);
11476 }
11477 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11478 && integer_onep (TREE_OPERAND (arg01, 0)))
11479 {
11480 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11481 arg00, TREE_OPERAND (arg01, 1));
11482 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11483 build_int_cst (TREE_TYPE (arg0), 1));
11484 return fold_build2_loc (loc, code, type,
11485 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11486 arg1);
11487 }
11488 }
11489
11490 /* If this is an NE or EQ comparison of zero against the result of a
11491 signed MOD operation whose second operand is a power of 2, make
11492 the MOD operation unsigned since it is simpler and equivalent. */
11493 if (integer_zerop (arg1)
11494 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11495 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11496 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11497 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11498 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11499 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11500 {
11501 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11502 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
11503 fold_convert_loc (loc, newtype,
11504 TREE_OPERAND (arg0, 0)),
11505 fold_convert_loc (loc, newtype,
11506 TREE_OPERAND (arg0, 1)));
11507
11508 return fold_build2_loc (loc, code, type, newmod,
11509 fold_convert_loc (loc, newtype, arg1));
11510 }
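/* Sketch: for int x, x % 8 == 0 becomes (unsigned) x % 8u == 0.
   The remainders differ for negative x (-5 % 8 is -5 signed but 3
   unsigned), yet both are zero exactly when the low three bits of x
   are zero, so a comparison against zero is unaffected.  */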
11511
11512 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11513 C1 is a valid shift constant, and C2 is a power of two, i.e.
11514 a single bit. */
11515 if (TREE_CODE (arg0) == BIT_AND_EXPR
11516 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11517 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11518 == INTEGER_CST
11519 && integer_pow2p (TREE_OPERAND (arg0, 1))
11520 && integer_zerop (arg1))
11521 {
11522 tree itype = TREE_TYPE (arg0);
11523 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11524 prec = TYPE_PRECISION (itype);
11525
11526 /* Check for a valid shift count. */
11527 if (wi::ltu_p (arg001, prec))
11528 {
11529 tree arg01 = TREE_OPERAND (arg0, 1);
11530 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11531 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11532 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11533 can be rewritten as (X & (C2 << C1)) != 0. */
11534 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11535 {
11536 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11537 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11538 return fold_build2_loc (loc, code, type, tem,
11539 fold_convert_loc (loc, itype, arg1));
11540 }
11541 /* Otherwise, for signed (arithmetic) shifts,
11542 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11543 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11544 else if (!TYPE_UNSIGNED (itype))
11545 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11546 arg000, build_int_cst (itype, 0));
11547 /* Otherwise, for unsigned (logical) shifts,
11548 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11549 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11550 else
11551 return omit_one_operand_loc (loc, type,
11552 code == EQ_EXPR ? integer_one_node
11553 : integer_zero_node,
11554 arg000);
11555 }
11556 }
11557
11558 /* If we have (A & C) == C where C is a power of 2, convert this into
11559 (A & C) != 0. Similarly for NE_EXPR. */
11560 if (TREE_CODE (arg0) == BIT_AND_EXPR
11561 && integer_pow2p (TREE_OPERAND (arg0, 1))
11562 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11563 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11564 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
11565 integer_zero_node));
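/* E.g. with C == 0x10: (A & 0x10) can only evaluate to 0 or 0x10,
   so testing == 0x10 and testing != 0 are equivalent.  */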
11566
11567 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11568 bit, then fold the expression into A < 0 or A >= 0. */
11569 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
11570 if (tem)
11571 return tem;
11572
11573 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11574 Similarly for NE_EXPR. */
11575 if (TREE_CODE (arg0) == BIT_AND_EXPR
11576 && TREE_CODE (arg1) == INTEGER_CST
11577 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11578 {
11579 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
11580 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11581 TREE_OPERAND (arg0, 1));
11582 tree dandnotc
11583 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11584 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
11585 notc);
11586 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11587 if (integer_nonzerop (dandnotc))
11588 return omit_one_operand_loc (loc, type, rslt, arg0);
11589 }
11590
11591 /* If this is a comparison of a field, we may be able to simplify it. */
11592 if ((TREE_CODE (arg0) == COMPONENT_REF
11593 || TREE_CODE (arg0) == BIT_FIELD_REF)
11594 /* Handle the constant case even without -O
11595 to make sure the warnings are given. */
11596 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11597 {
11598 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11599 if (t1)
11600 return t1;
11601 }
11602
11603 /* Optimize comparisons of strlen vs zero to a compare of the
11604 first character of the string vs zero. To wit,
11605 strlen(ptr) == 0 => *ptr == 0
11606 strlen(ptr) != 0 => *ptr != 0
11607 Other cases should reduce to one of these two (or a constant)
11608 due to the return value of strlen being unsigned. */
11609 if (TREE_CODE (arg0) == CALL_EXPR
11610 && integer_zerop (arg1))
11611 {
11612 tree fndecl = get_callee_fndecl (arg0);
11613
11614 if (fndecl
11615 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11616 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11617 && call_expr_nargs (arg0) == 1
11618 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11619 {
11620 tree iref = build_fold_indirect_ref_loc (loc,
11621 CALL_EXPR_ARG (arg0, 0));
11622 return fold_build2_loc (loc, code, type, iref,
11623 build_int_cst (TREE_TYPE (iref), 0));
11624 }
11625 }
11626
11627 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11628 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11629 if (TREE_CODE (arg0) == RSHIFT_EXPR
11630 && integer_zerop (arg1)
11631 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11632 {
11633 tree arg00 = TREE_OPERAND (arg0, 0);
11634 tree arg01 = TREE_OPERAND (arg0, 1);
11635 tree itype = TREE_TYPE (arg00);
11636 if (wi::eq_p (arg01, element_precision (itype) - 1))
11637 {
11638 if (TYPE_UNSIGNED (itype))
11639 {
11640 itype = signed_type_for (itype);
11641 arg00 = fold_convert_loc (loc, itype, arg00);
11642 }
11643 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11644 type, arg00, build_zero_cst (itype));
11645 }
11646 }
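/* Sketch for 32-bit int x: (x >> 31) != 0 -> x < 0 and
   (x >> 31) == 0 -> x >= 0, since an arithmetic shift by
   precision - 1 leaves only copies of the sign bit.  */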
11647
11648 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11649 (X & C) == 0 when C is a single bit. */
11650 if (TREE_CODE (arg0) == BIT_AND_EXPR
11651 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11652 && integer_zerop (arg1)
11653 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11654 {
11655 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11656 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11657 TREE_OPERAND (arg0, 1));
11658 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11659 type, tem,
11660 fold_convert_loc (loc, TREE_TYPE (arg0),
11661 arg1));
11662 }
11663
11664 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11665 constant C is a power of two, i.e. a single bit. */
11666 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11667 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11668 && integer_zerop (arg1)
11669 && integer_pow2p (TREE_OPERAND (arg0, 1))
11670 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11671 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11672 {
11673 tree arg00 = TREE_OPERAND (arg0, 0);
11674 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11675 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11676 }
11677
11678 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11679 when C is a power of two, i.e. a single bit. */
11680 if (TREE_CODE (arg0) == BIT_AND_EXPR
11681 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11682 && integer_zerop (arg1)
11683 && integer_pow2p (TREE_OPERAND (arg0, 1))
11684 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11685 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11686 {
11687 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11688 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11689 arg000, TREE_OPERAND (arg0, 1));
11690 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11691 tem, build_int_cst (TREE_TYPE (tem), 0));
11692 }
11693
11694 if (integer_zerop (arg1)
11695 && tree_expr_nonzero_p (arg0))
11696 {
11697 tree res = constant_boolean_node (code == NE_EXPR, type);
11698 return omit_one_operand_loc (loc, type, res, arg0);
11699 }
11700
11701 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11702 if (TREE_CODE (arg0) == BIT_AND_EXPR
11703 && TREE_CODE (arg1) == BIT_AND_EXPR)
11704 {
11705 tree arg00 = TREE_OPERAND (arg0, 0);
11706 tree arg01 = TREE_OPERAND (arg0, 1);
11707 tree arg10 = TREE_OPERAND (arg1, 0);
11708 tree arg11 = TREE_OPERAND (arg1, 1);
11709 tree itype = TREE_TYPE (arg0);
11710
11711 if (operand_equal_p (arg01, arg11, 0))
11712 return fold_build2_loc (loc, code, type,
11713 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11714 fold_build2_loc (loc,
11715 BIT_XOR_EXPR, itype,
11716 arg00, arg10),
11717 arg01),
11718 build_zero_cst (itype));
11719
11720 if (operand_equal_p (arg01, arg10, 0))
11721 return fold_build2_loc (loc, code, type,
11722 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11723 fold_build2_loc (loc,
11724 BIT_XOR_EXPR, itype,
11725 arg00, arg11),
11726 arg01),
11727 build_zero_cst (itype));
11728
11729 if (operand_equal_p (arg00, arg11, 0))
11730 return fold_build2_loc (loc, code, type,
11731 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11732 fold_build2_loc (loc,
11733 BIT_XOR_EXPR, itype,
11734 arg01, arg10),
11735 arg00),
11736 build_zero_cst (itype));
11737
11738 if (operand_equal_p (arg00, arg10, 0))
11739 return fold_build2_loc (loc, code, type,
11740 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11741 fold_build2_loc (loc,
11742 BIT_XOR_EXPR, itype,
11743 arg01, arg11),
11744 arg00),
11745 build_zero_cst (itype));
11746 }
11747
11748 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11749 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11750 {
11751 tree arg00 = TREE_OPERAND (arg0, 0);
11752 tree arg01 = TREE_OPERAND (arg0, 1);
11753 tree arg10 = TREE_OPERAND (arg1, 0);
11754 tree arg11 = TREE_OPERAND (arg1, 1);
11755 tree itype = TREE_TYPE (arg0);
11756
11757 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11758 operand_equal_p guarantees no side-effects so we don't need
11759 to use omit_one_operand on Z. */
11760 if (operand_equal_p (arg01, arg11, 0))
11761 return fold_build2_loc (loc, code, type, arg00,
11762 fold_convert_loc (loc, TREE_TYPE (arg00),
11763 arg10));
11764 if (operand_equal_p (arg01, arg10, 0))
11765 return fold_build2_loc (loc, code, type, arg00,
11766 fold_convert_loc (loc, TREE_TYPE (arg00),
11767 arg11));
11768 if (operand_equal_p (arg00, arg11, 0))
11769 return fold_build2_loc (loc, code, type, arg01,
11770 fold_convert_loc (loc, TREE_TYPE (arg01),
11771 arg10));
11772 if (operand_equal_p (arg00, arg10, 0))
11773 return fold_build2_loc (loc, code, type, arg01,
11774 fold_convert_loc (loc, TREE_TYPE (arg01),
11775 arg11));
11776
11777 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11778 if (TREE_CODE (arg01) == INTEGER_CST
11779 && TREE_CODE (arg11) == INTEGER_CST)
11780 {
11781 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11782 fold_convert_loc (loc, itype, arg11));
11783 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11784 return fold_build2_loc (loc, code, type, tem,
11785 fold_convert_loc (loc, itype, arg10));
11786 }
11787 }
11788
11789 /* Attempt to simplify equality/inequality comparisons of complex
11790 values. Only lower the comparison if the result is known or
11791 can be simplified to a single scalar comparison. */
11792 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11793 || TREE_CODE (arg0) == COMPLEX_CST)
11794 && (TREE_CODE (arg1) == COMPLEX_EXPR
11795 || TREE_CODE (arg1) == COMPLEX_CST))
11796 {
11797 tree real0, imag0, real1, imag1;
11798 tree rcond, icond;
11799
11800 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11801 {
11802 real0 = TREE_OPERAND (arg0, 0);
11803 imag0 = TREE_OPERAND (arg0, 1);
11804 }
11805 else
11806 {
11807 real0 = TREE_REALPART (arg0);
11808 imag0 = TREE_IMAGPART (arg0);
11809 }
11810
11811 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11812 {
11813 real1 = TREE_OPERAND (arg1, 0);
11814 imag1 = TREE_OPERAND (arg1, 1);
11815 }
11816 else
11817 {
11818 real1 = TREE_REALPART (arg1);
11819 imag1 = TREE_IMAGPART (arg1);
11820 }
11821
11822 rcond = fold_binary_loc (loc, code, type, real0, real1);
11823 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11824 {
11825 if (integer_zerop (rcond))
11826 {
11827 if (code == EQ_EXPR)
11828 return omit_two_operands_loc (loc, type, boolean_false_node,
11829 imag0, imag1);
11830 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11831 }
11832 else
11833 {
11834 if (code == NE_EXPR)
11835 return omit_two_operands_loc (loc, type, boolean_true_node,
11836 imag0, imag1);
11837 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11838 }
11839 }
11840
11841 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11842 if (icond && TREE_CODE (icond) == INTEGER_CST)
11843 {
11844 if (integer_zerop (icond))
11845 {
11846 if (code == EQ_EXPR)
11847 return omit_two_operands_loc (loc, type, boolean_false_node,
11848 real0, real1);
11849 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11850 }
11851 else
11852 {
11853 if (code == NE_EXPR)
11854 return omit_two_operands_loc (loc, type, boolean_true_node,
11855 real0, real1);
11856 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11857 }
11858 }
11859 }
11860
11861 return NULL_TREE;
11862
11863 case LT_EXPR:
11864 case GT_EXPR:
11865 case LE_EXPR:
11866 case GE_EXPR:
11867 tem = fold_comparison (loc, code, type, op0, op1);
11868 if (tem != NULL_TREE)
11869 return tem;
11870
11871 /* Transform comparisons of the form X +- C CMP X. */
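/* For example, when signed overflow is undefined, X + 1 > X folds
to true and X - 1 >= X folds to false. */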
11872 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11873 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11874 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11875 && !HONOR_SNANS (arg0))
11876 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11877 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11878 {
11879 tree arg01 = TREE_OPERAND (arg0, 1);
11880 enum tree_code code0 = TREE_CODE (arg0);
11881 int is_positive;
11882
11883 if (TREE_CODE (arg01) == REAL_CST)
11884 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11885 else
11886 is_positive = tree_int_cst_sgn (arg01);
11887
11888 /* (X - c) > X becomes false. */
11889 if (code == GT_EXPR
11890 && ((code0 == MINUS_EXPR && is_positive >= 0)
11891 || (code0 == PLUS_EXPR && is_positive <= 0)))
11892 {
11893 if (TREE_CODE (arg01) == INTEGER_CST
11894 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11895 fold_overflow_warning (("assuming signed overflow does not "
11896 "occur when assuming that (X - c) > X "
11897 "is always false"),
11898 WARN_STRICT_OVERFLOW_ALL);
11899 return constant_boolean_node (0, type);
11900 }
11901
11902 /* Likewise (X + c) < X becomes false. */
11903 if (code == LT_EXPR
11904 && ((code0 == PLUS_EXPR && is_positive >= 0)
11905 || (code0 == MINUS_EXPR && is_positive <= 0)))
11906 {
11907 if (TREE_CODE (arg01) == INTEGER_CST
11908 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11909 fold_overflow_warning (("assuming signed overflow does not "
11910 "occur when assuming that "
11911 "(X + c) < X is always false"),
11912 WARN_STRICT_OVERFLOW_ALL);
11913 return constant_boolean_node (0, type);
11914 }
11915
11916 /* Convert (X - c) <= X to true. */
11917 if (!HONOR_NANS (arg1)
11918 && code == LE_EXPR
11919 && ((code0 == MINUS_EXPR && is_positive >= 0)
11920 || (code0 == PLUS_EXPR && is_positive <= 0)))
11921 {
11922 if (TREE_CODE (arg01) == INTEGER_CST
11923 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11924 fold_overflow_warning (("assuming signed overflow does not "
11925 "occur when assuming that "
11926 "(X - c) <= X is always true"),
11927 WARN_STRICT_OVERFLOW_ALL);
11928 return constant_boolean_node (1, type);
11929 }
11930
11931 /* Convert (X + c) >= X to true. */
11932 if (!HONOR_NANS (arg1)
11933 && code == GE_EXPR
11934 && ((code0 == PLUS_EXPR && is_positive >= 0)
11935 || (code0 == MINUS_EXPR && is_positive <= 0)))
11936 {
11937 if (TREE_CODE (arg01) == INTEGER_CST
11938 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11939 fold_overflow_warning (("assuming signed overflow does not "
11940 "occur when assuming that "
11941 "(X + c) >= X is always true"),
11942 WARN_STRICT_OVERFLOW_ALL);
11943 return constant_boolean_node (1, type);
11944 }
11945
11946 if (TREE_CODE (arg01) == INTEGER_CST)
11947 {
11948 /* Convert X + c > X and X - c < X to true for integers. */
11949 if (code == GT_EXPR
11950 && ((code0 == PLUS_EXPR && is_positive > 0)
11951 || (code0 == MINUS_EXPR && is_positive < 0)))
11952 {
11953 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11954 fold_overflow_warning (("assuming signed overflow does "
11955 "not occur when assuming that "
11956 "(X + c) > X is always true"),
11957 WARN_STRICT_OVERFLOW_ALL);
11958 return constant_boolean_node (1, type);
11959 }
11960
11961 if (code == LT_EXPR
11962 && ((code0 == MINUS_EXPR && is_positive > 0)
11963 || (code0 == PLUS_EXPR && is_positive < 0)))
11964 {
11965 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11966 fold_overflow_warning (("assuming signed overflow does "
11967 "not occur when assuming that "
11968 "(X - c) < X is always true"),
11969 WARN_STRICT_OVERFLOW_ALL);
11970 return constant_boolean_node (1, type);
11971 }
11972
11973 /* Convert X + c <= X and X - c >= X to false for integers. */
11974 if (code == LE_EXPR
11975 && ((code0 == PLUS_EXPR && is_positive > 0)
11976 || (code0 == MINUS_EXPR && is_positive < 0)))
11977 {
11978 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11979 fold_overflow_warning (("assuming signed overflow does "
11980 "not occur when assuming that "
11981 "(X + c) <= X is always false"),
11982 WARN_STRICT_OVERFLOW_ALL);
11983 return constant_boolean_node (0, type);
11984 }
11985
11986 if (code == GE_EXPR
11987 && ((code0 == MINUS_EXPR && is_positive > 0)
11988 || (code0 == PLUS_EXPR && is_positive < 0)))
11989 {
11990 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11991 fold_overflow_warning (("assuming signed overflow does "
11992 "not occur when assuming that "
11993 "(X - c) >= X is always false"),
11994 WARN_STRICT_OVERFLOW_ALL);
11995 return constant_boolean_node (0, type);
11996 }
11997 }
11998 }
11999
12000 /* Comparisons with the highest or lowest possible integer of
12001 the specified precision will have known values. */
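/* For example, if X has type unsigned char, X > 255 folds to false,
X <= 255 folds to true, and X > 254 becomes X == 255. */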
12002 {
12003 tree arg1_type = TREE_TYPE (arg1);
12004 unsigned int prec = TYPE_PRECISION (arg1_type);
12005
12006 if (TREE_CODE (arg1) == INTEGER_CST
12007 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12008 {
12009 wide_int max = wi::max_value (arg1_type);
12010 wide_int signed_max = wi::max_value (prec, SIGNED);
12011 wide_int min = wi::min_value (arg1_type);
12012
12013 if (wi::eq_p (arg1, max))
12014 switch (code)
12015 {
12016 case GT_EXPR:
12017 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12018
12019 case GE_EXPR:
12020 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12021
12022 case LE_EXPR:
12023 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12024
12025 case LT_EXPR:
12026 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12027
12028 /* The GE_EXPR and LT_EXPR cases above are not normally
12029 reached because of previous transformations. */
12030
12031 default:
12032 break;
12033 }
12034 else if (wi::eq_p (arg1, max - 1))
12035 switch (code)
12036 {
12037 case GT_EXPR:
12038 arg1 = const_binop (PLUS_EXPR, arg1,
12039 build_int_cst (TREE_TYPE (arg1), 1));
12040 return fold_build2_loc (loc, EQ_EXPR, type,
12041 fold_convert_loc (loc,
12042 TREE_TYPE (arg1), arg0),
12043 arg1);
12044 case LE_EXPR:
12045 arg1 = const_binop (PLUS_EXPR, arg1,
12046 build_int_cst (TREE_TYPE (arg1), 1));
12047 return fold_build2_loc (loc, NE_EXPR, type,
12048 fold_convert_loc (loc, TREE_TYPE (arg1),
12049 arg0),
12050 arg1);
12051 default:
12052 break;
12053 }
12054 else if (wi::eq_p (arg1, min))
12055 switch (code)
12056 {
12057 case LT_EXPR:
12058 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12059
12060 case LE_EXPR:
12061 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12062
12063 case GE_EXPR:
12064 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12065
12066 case GT_EXPR:
12067 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12068
12069 default:
12070 break;
12071 }
12072 else if (wi::eq_p (arg1, min + 1))
12073 switch (code)
12074 {
12075 case GE_EXPR:
12076 arg1 = const_binop (MINUS_EXPR, arg1,
12077 build_int_cst (TREE_TYPE (arg1), 1));
12078 return fold_build2_loc (loc, NE_EXPR, type,
12079 fold_convert_loc (loc,
12080 TREE_TYPE (arg1), arg0),
12081 arg1);
12082 case LT_EXPR:
12083 arg1 = const_binop (MINUS_EXPR, arg1,
12084 build_int_cst (TREE_TYPE (arg1), 1));
12085 return fold_build2_loc (loc, EQ_EXPR, type,
12086 fold_convert_loc (loc, TREE_TYPE (arg1),
12087 arg0),
12088 arg1);
12089 default:
12090 break;
12091 }
12092
12093 else if (wi::eq_p (arg1, signed_max)
12094 && TYPE_UNSIGNED (arg1_type)
12095 /* We will flip the signedness of the comparison operator
12096 associated with the mode of arg1, so the sign bit is
12097 specified by this mode. Check that arg1 is the signed
12098 max associated with this sign bit. */
12099 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
12100 /* signed_type does not work on pointer types. */
12101 && INTEGRAL_TYPE_P (arg1_type))
12102 {
12103 /* The following case also applies to X < signed_max+1
12104 and X >= signed_max+1 because of previous transformations. */
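/* For example, for a 32-bit unsigned X, X <= 0x7fffffff becomes
(int) X >= 0 and X > 0x7fffffff becomes (int) X < 0. */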
12105 if (code == LE_EXPR || code == GT_EXPR)
12106 {
12107 tree st = signed_type_for (arg1_type);
12108 return fold_build2_loc (loc,
12109 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12110 type, fold_convert_loc (loc, st, arg0),
12111 build_int_cst (st, 0));
12112 }
12113 }
12114 }
12115 }
12116
12117 /* If we are comparing an ABS_EXPR with a constant, we can
12118 convert all the cases into explicit comparisons, but they may
12119 well not be faster than doing the ABS and one comparison.
12120 But ABS (X) <= C is a range comparison, which becomes a subtraction
12121 and a comparison, and is probably faster. */
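/* For example, ABS (X) <= 5 folds to X >= -5 && X <= 5. */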
12122 if (code == LE_EXPR
12123 && TREE_CODE (arg1) == INTEGER_CST
12124 && TREE_CODE (arg0) == ABS_EXPR
12125 && ! TREE_SIDE_EFFECTS (arg0)
12126 && (0 != (tem = negate_expr (arg1)))
12127 && TREE_CODE (tem) == INTEGER_CST
12128 && !TREE_OVERFLOW (tem))
12129 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12130 build2 (GE_EXPR, type,
12131 TREE_OPERAND (arg0, 0), tem),
12132 build2 (LE_EXPR, type,
12133 TREE_OPERAND (arg0, 0), arg1));
12134
12135 /* Convert ABS_EXPR<x> >= 0 to true. */
12136 strict_overflow_p = false;
12137 if (code == GE_EXPR
12138 && (integer_zerop (arg1)
12139 || (! HONOR_NANS (arg0)
12140 && real_zerop (arg1)))
12141 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12142 {
12143 if (strict_overflow_p)
12144 fold_overflow_warning (("assuming signed overflow does not occur "
12145 "when simplifying comparison of "
12146 "absolute value and zero"),
12147 WARN_STRICT_OVERFLOW_CONDITIONAL);
12148 return omit_one_operand_loc (loc, type,
12149 constant_boolean_node (true, type),
12150 arg0);
12151 }
12152
12153 /* Convert ABS_EXPR<x> < 0 to false. */
12154 strict_overflow_p = false;
12155 if (code == LT_EXPR
12156 && (integer_zerop (arg1) || real_zerop (arg1))
12157 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12158 {
12159 if (strict_overflow_p)
12160 fold_overflow_warning (("assuming signed overflow does not occur "
12161 "when simplifying comparison of "
12162 "absolute value and zero"),
12163 WARN_STRICT_OVERFLOW_CONDITIONAL);
12164 return omit_one_operand_loc (loc, type,
12165 constant_boolean_node (false, type),
12166 arg0);
12167 }
12168
12169 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12170 and similarly for >= into !=. */
12171 if ((code == LT_EXPR || code == GE_EXPR)
12172 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12173 && TREE_CODE (arg1) == LSHIFT_EXPR
12174 && integer_onep (TREE_OPERAND (arg1, 0)))
12175 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12176 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12177 TREE_OPERAND (arg1, 1)),
12178 build_zero_cst (TREE_TYPE (arg0)));
12179
12180 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12181 otherwise Y might be >= # of bits in X's type and thus e.g.
12182 (unsigned char) (1 << Y) for Y == 15 might be 0.
12183 If the cast is widening, then 1 << Y should have unsigned type,
12184 otherwise if Y is the number of bits in the signed shift type minus 1,
12185 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
12186 Y == 31 might be 0xffffffff80000000. */
12187 if ((code == LT_EXPR || code == GE_EXPR)
12188 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12189 && CONVERT_EXPR_P (arg1)
12190 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12191 && (element_precision (TREE_TYPE (arg1))
12192 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12193 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12194 || (element_precision (TREE_TYPE (arg1))
12195 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12196 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12197 {
12198 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12199 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12200 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12201 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12202 build_zero_cst (TREE_TYPE (arg0)));
12203 }
12204
12205 return NULL_TREE;
12206
12207 case UNORDERED_EXPR:
12208 case ORDERED_EXPR:
12209 case UNLT_EXPR:
12210 case UNLE_EXPR:
12211 case UNGT_EXPR:
12212 case UNGE_EXPR:
12213 case UNEQ_EXPR:
12214 case LTGT_EXPR:
12215 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12216 {
12217 t1 = fold_relational_const (code, type, arg0, arg1);
12218 if (t1 != NULL_TREE)
12219 return t1;
12220 }
12221
12222 /* If the first operand is NaN, the result is constant. */
12223 if (TREE_CODE (arg0) == REAL_CST
12224 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12225 && (code != LTGT_EXPR || ! flag_trapping_math))
12226 {
12227 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12228 ? integer_zero_node
12229 : integer_one_node;
12230 return omit_one_operand_loc (loc, type, t1, arg1);
12231 }
12232
12233 /* If the second operand is NaN, the result is constant. */
12234 if (TREE_CODE (arg1) == REAL_CST
12235 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12236 && (code != LTGT_EXPR || ! flag_trapping_math))
12237 {
12238 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12239 ? integer_zero_node
12240 : integer_one_node;
12241 return omit_one_operand_loc (loc, type, t1, arg0);
12242 }
12243
12244 /* Simplify unordered comparison of something with itself. */
12245 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12246 && operand_equal_p (arg0, arg1, 0))
12247 return constant_boolean_node (1, type);
12248
12249 if (code == LTGT_EXPR
12250 && !flag_trapping_math
12251 && operand_equal_p (arg0, arg1, 0))
12252 return constant_boolean_node (0, type);
12253
12254 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12255 {
12256 tree targ0 = strip_float_extensions (arg0);
12257 tree targ1 = strip_float_extensions (arg1);
12258 tree newtype = TREE_TYPE (targ0);
12259
12260 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12261 newtype = TREE_TYPE (targ1);
12262
12263 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12264 return fold_build2_loc (loc, code, type,
12265 fold_convert_loc (loc, newtype, targ0),
12266 fold_convert_loc (loc, newtype, targ1));
12267 }
12268
12269 return NULL_TREE;
12270
12271 case COMPOUND_EXPR:
12272 /* When pedantic, a compound expression can be neither an lvalue
12273 nor an integer constant expression. */
12274 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12275 return NULL_TREE;
12276 /* Don't let (0, 0) be null pointer constant. */
12277 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12278 : fold_convert_loc (loc, type, arg1);
12279 return pedantic_non_lvalue_loc (loc, tem);
12280
12281 case ASSERT_EXPR:
12282 /* An ASSERT_EXPR should never be passed to fold_binary. */
12283 gcc_unreachable ();
12284
12285 default:
12286 return NULL_TREE;
12287 } /* switch (code) */
12288 }
12289
12290 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12291 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
12292 of GOTO_EXPR. */
12293
12294 static tree
12295 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
12296 {
12297 switch (TREE_CODE (*tp))
12298 {
12299 case LABEL_EXPR:
12300 return *tp;
12301
12302 case GOTO_EXPR:
12303 *walk_subtrees = 0;
12304
12305 /* ... fall through ... */
12306
12307 default:
12308 return NULL_TREE;
12309 }
12310 }
12311
12312 /* Return whether the sub-tree ST contains a label which is accessible from
12313 outside the sub-tree. */
12314
12315 static bool
12316 contains_label_p (tree st)
12317 {
12318 return
12319 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
12320 }
12321
12322 /* Fold a ternary expression of code CODE and type TYPE with operands
12323 OP0, OP1, and OP2. Return the folded expression if folding is
12324 successful. Otherwise, return NULL_TREE. */
12325
12326 tree
12327 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12328 tree op0, tree op1, tree op2)
12329 {
12330 tree tem;
12331 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12332 enum tree_code_class kind = TREE_CODE_CLASS (code);
12333
12334 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12335 && TREE_CODE_LENGTH (code) == 3);
12336
12337 /* If this is a commutative operation, and OP0 is a constant, move it
12338 to OP1 to reduce the number of tests below. */
12339 if (commutative_ternary_tree_code (code)
12340 && tree_swap_operands_p (op0, op1, true))
12341 return fold_build3_loc (loc, code, type, op1, op0, op2);
12342
12343 tem = generic_simplify (loc, code, type, op0, op1, op2);
12344 if (tem)
12345 return tem;
12346
12347 /* Strip any conversions that don't change the mode. This is safe
12348 for every expression, except for a comparison expression because
12349 its signedness is derived from its operands. So, in the latter
12350 case, only strip conversions that don't change the signedness.
12351
12352 Note that this is done as an internal manipulation within the
12353 constant folder, in order to find the simplest representation of
12354 the arguments so that their form can be studied. In any case,
12355 the appropriate type conversions should be put back in the tree
12356 that will get out of the constant folder. */
12357 if (op0)
12358 {
12359 arg0 = op0;
12360 STRIP_NOPS (arg0);
12361 }
12362
12363 if (op1)
12364 {
12365 arg1 = op1;
12366 STRIP_NOPS (arg1);
12367 }
12368
12369 if (op2)
12370 {
12371 arg2 = op2;
12372 STRIP_NOPS (arg2);
12373 }
12374
12375 switch (code)
12376 {
12377 case COMPONENT_REF:
12378 if (TREE_CODE (arg0) == CONSTRUCTOR
12379 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12380 {
12381 unsigned HOST_WIDE_INT idx;
12382 tree field, value;
12383 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12384 if (field == arg1)
12385 return value;
12386 }
12387 return NULL_TREE;
12388
12389 case COND_EXPR:
12390 case VEC_COND_EXPR:
12391 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12392 so all simple results must be passed through pedantic_non_lvalue. */
12393 if (TREE_CODE (arg0) == INTEGER_CST)
12394 {
12395 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12396 tem = integer_zerop (arg0) ? op2 : op1;
12397 /* Only optimize constant conditions when the selected branch
12398 has the same type as the COND_EXPR. This avoids optimizing
12399 away "c ? x : throw", where the throw has a void type.
12400 Avoid throwing away the operand that contains a label. */
12401 if ((!TREE_SIDE_EFFECTS (unused_op)
12402 || !contains_label_p (unused_op))
12403 && (! VOID_TYPE_P (TREE_TYPE (tem))
12404 || VOID_TYPE_P (type)))
12405 return pedantic_non_lvalue_loc (loc, tem);
12406 return NULL_TREE;
12407 }
12408 else if (TREE_CODE (arg0) == VECTOR_CST)
12409 {
12410 if ((TREE_CODE (arg1) == VECTOR_CST
12411 || TREE_CODE (arg1) == CONSTRUCTOR)
12412 && (TREE_CODE (arg2) == VECTOR_CST
12413 || TREE_CODE (arg2) == CONSTRUCTOR))
12414 {
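/* A constant mask selects, element by element, from arg1 where
the mask element is all ones and from arg2 where it is zero,
so the selection can be expressed as a VEC_PERM of the two
constant operands. */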
12415 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
12416 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
12417 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
12418 for (i = 0; i < nelts; i++)
12419 {
12420 tree val = VECTOR_CST_ELT (arg0, i);
12421 if (integer_all_onesp (val))
12422 sel[i] = i;
12423 else if (integer_zerop (val))
12424 sel[i] = nelts + i;
12425 else /* Currently unreachable. */
12426 return NULL_TREE;
12427 }
12428 tree t = fold_vec_perm (type, arg1, arg2, sel);
12429 if (t != NULL_TREE)
12430 return t;
12431 }
12432 }
12433
12434 /* If we have A op B ? A : C, we may be able to convert this to a
12435 simpler expression, depending on the operation and the values
12436 of B and C. Signed zeros prevent all of these transformations,
12437 for reasons given above each one.
12438
12439 Also try swapping the arguments and inverting the conditional. */
12440 if (COMPARISON_CLASS_P (arg0)
12441 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12442 arg1, TREE_OPERAND (arg0, 1))
12443 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
12444 {
12445 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12446 if (tem)
12447 return tem;
12448 }
12449
12450 if (COMPARISON_CLASS_P (arg0)
12451 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12452 op2,
12453 TREE_OPERAND (arg0, 1))
12454 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12455 {
12456 location_t loc0 = expr_location_or (arg0, loc);
12457 tem = fold_invert_truthvalue (loc0, arg0);
12458 if (tem && COMPARISON_CLASS_P (tem))
12459 {
12460 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12461 if (tem)
12462 return tem;
12463 }
12464 }
12465
12466 /* If the second operand is simpler than the third, swap them
12467 since that produces better jump optimization results. */
12468 if (truth_value_p (TREE_CODE (arg0))
12469 && tree_swap_operands_p (op1, op2, false))
12470 {
12471 location_t loc0 = expr_location_or (arg0, loc);
12472 /* See if this can be inverted. If it can't, possibly because
12473 it was a floating-point inequality comparison, don't do
12474 anything. */
12475 tem = fold_invert_truthvalue (loc0, arg0);
12476 if (tem)
12477 return fold_build3_loc (loc, code, type, tem, op2, op1);
12478 }
12479
12480 /* Convert A ? 1 : 0 to simply A. */
12481 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12482 : (integer_onep (op1)
12483 && !VECTOR_TYPE_P (type)))
12484 && integer_zerop (op2)
12485 /* If we try to convert OP0 to our type, the
12486 call to fold will try to move the conversion inside
12487 a COND, which will recurse. In that case, the COND_EXPR
12488 is probably the best choice, so leave it alone. */
12489 && type == TREE_TYPE (arg0))
12490 return pedantic_non_lvalue_loc (loc, arg0);
12491
12492 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12493 over COND_EXPR in cases such as floating point comparisons. */
12494 if (integer_zerop (op1)
12495 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
12496 : (integer_onep (op2)
12497 && !VECTOR_TYPE_P (type)))
12498 && truth_value_p (TREE_CODE (arg0)))
12499 return pedantic_non_lvalue_loc (loc,
12500 fold_convert_loc (loc, type,
12501 invert_truthvalue_loc (loc,
12502 arg0)));
12503
12504 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12505 if (TREE_CODE (arg0) == LT_EXPR
12506 && integer_zerop (TREE_OPERAND (arg0, 1))
12507 && integer_zerop (op2)
12508 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12509 {
12510 /* sign_bit_p looks through both zero and sign extensions,
12511 but for this optimization only sign extensions are
12512 usable. */
12513 tree tem2 = TREE_OPERAND (arg0, 0);
12514 while (tem != tem2)
12515 {
12516 if (TREE_CODE (tem2) != NOP_EXPR
12517 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12518 {
12519 tem = NULL_TREE;
12520 break;
12521 }
12522 tem2 = TREE_OPERAND (tem2, 0);
12523 }
12524 /* sign_bit_p only checks ARG1 bits within A's precision.
12525 If <sign bit of A> has wider type than A, bits outside
12526 of A's precision in <sign bit of A> need to be checked.
12527 If they are all 0, this optimization needs to be done
12528 in unsigned A's type; if they are all 1, in signed A's type;
12529 otherwise this can't be done. */
12530 if (tem
12531 && TYPE_PRECISION (TREE_TYPE (tem))
12532 < TYPE_PRECISION (TREE_TYPE (arg1))
12533 && TYPE_PRECISION (TREE_TYPE (tem))
12534 < TYPE_PRECISION (type))
12535 {
12536 int inner_width, outer_width;
12537 tree tem_type;
12538
12539 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12540 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12541 if (outer_width > TYPE_PRECISION (type))
12542 outer_width = TYPE_PRECISION (type);
12543
12544 wide_int mask = wi::shifted_mask
12545 (inner_width, outer_width - inner_width, false,
12546 TYPE_PRECISION (TREE_TYPE (arg1)));
12547
12548 wide_int common = mask & arg1;
12549 if (common == mask)
12550 {
12551 tem_type = signed_type_for (TREE_TYPE (tem));
12552 tem = fold_convert_loc (loc, tem_type, tem);
12553 }
12554 else if (common == 0)
12555 {
12556 tem_type = unsigned_type_for (TREE_TYPE (tem));
12557 tem = fold_convert_loc (loc, tem_type, tem);
12558 }
12559 else
12560 tem = NULL;
12561 }
12562
12563 if (tem)
12564 return
12565 fold_convert_loc (loc, type,
12566 fold_build2_loc (loc, BIT_AND_EXPR,
12567 TREE_TYPE (tem), tem,
12568 fold_convert_loc (loc,
12569 TREE_TYPE (tem),
12570 arg1)));
12571 }
12572
12573 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12574 already handled above. */
12575 if (TREE_CODE (arg0) == BIT_AND_EXPR
12576 && integer_onep (TREE_OPERAND (arg0, 1))
12577 && integer_zerop (op2)
12578 && integer_pow2p (arg1))
12579 {
12580 tree tem = TREE_OPERAND (arg0, 0);
12581 STRIP_NOPS (tem);
12582 if (TREE_CODE (tem) == RSHIFT_EXPR
12583 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12584 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12585 tree_to_uhwi (TREE_OPERAND (tem, 1)))
12586 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12587 TREE_OPERAND (tem, 0), arg1);
12588 }
12589
12590 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12591 is probably obsolete because the first operand should be a
12592 truth value (that's why we have the two cases above), but let's
12593 leave it in until we can confirm this for all front-ends. */
12594 if (integer_zerop (op2)
12595 && TREE_CODE (arg0) == NE_EXPR
12596 && integer_zerop (TREE_OPERAND (arg0, 1))
12597 && integer_pow2p (arg1)
12598 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12599 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12600 arg1, OEP_ONLY_CONST))
12601 return pedantic_non_lvalue_loc (loc,
12602 fold_convert_loc (loc, type,
12603 TREE_OPERAND (arg0, 0)));
12604
12605 /* Disable the transformations below for vectors, since
12606 fold_binary_op_with_conditional_arg may undo them immediately,
12607 yielding an infinite loop. */
12608 if (code == VEC_COND_EXPR)
12609 return NULL_TREE;
12610
12611 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12612 if (integer_zerop (op2)
12613 && truth_value_p (TREE_CODE (arg0))
12614 && truth_value_p (TREE_CODE (arg1))
12615 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12616 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12617 : TRUTH_ANDIF_EXPR,
12618 type, fold_convert_loc (loc, type, arg0), arg1);
12619
12620 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12621 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
12622 && truth_value_p (TREE_CODE (arg0))
12623 && truth_value_p (TREE_CODE (arg1))
12624 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12625 {
12626 location_t loc0 = expr_location_or (arg0, loc);
12627 /* Only perform transformation if ARG0 is easily inverted. */
12628 tem = fold_invert_truthvalue (loc0, arg0);
12629 if (tem)
12630 return fold_build2_loc (loc, code == VEC_COND_EXPR
12631 ? BIT_IOR_EXPR
12632 : TRUTH_ORIF_EXPR,
12633 type, fold_convert_loc (loc, type, tem),
12634 arg1);
12635 }
12636
12637 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12638 if (integer_zerop (arg1)
12639 && truth_value_p (TREE_CODE (arg0))
12640 && truth_value_p (TREE_CODE (op2))
12641 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12642 {
12643 location_t loc0 = expr_location_or (arg0, loc);
12644 /* Only perform transformation if ARG0 is easily inverted. */
12645 tem = fold_invert_truthvalue (loc0, arg0);
12646 if (tem)
12647 return fold_build2_loc (loc, code == VEC_COND_EXPR
12648 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12649 type, fold_convert_loc (loc, type, tem),
12650 op2);
12651 }
12652
12653 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12654 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
12655 && truth_value_p (TREE_CODE (arg0))
12656 && truth_value_p (TREE_CODE (op2))
12657 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12658 return fold_build2_loc (loc, code == VEC_COND_EXPR
12659 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12660 type, fold_convert_loc (loc, type, arg0), op2);
12661
12662 return NULL_TREE;
12663
12664 case CALL_EXPR:
12665 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12666 of fold_ternary on them. */
12667 gcc_unreachable ();
12668
12669 case BIT_FIELD_REF:
12670 if ((TREE_CODE (arg0) == VECTOR_CST
12671 || (TREE_CODE (arg0) == CONSTRUCTOR
12672 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
12673 && (type == TREE_TYPE (TREE_TYPE (arg0))
12674 || (TREE_CODE (type) == VECTOR_TYPE
12675 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
12676 {
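/* If the reference covers a whole number of vector elements,
extract them directly from the constant or constructor. */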
12677 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12678 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12679 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12680 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12681
12682 if (n != 0
12683 && (idx % width) == 0
12684 && (n % width) == 0
12685 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12686 {
12687 idx = idx / width;
12688 n = n / width;
12689
12690 if (TREE_CODE (arg0) == VECTOR_CST)
12691 {
12692 if (n == 1)
12693 return VECTOR_CST_ELT (arg0, idx);
12694
12695 tree *vals = XALLOCAVEC (tree, n);
12696 for (unsigned i = 0; i < n; ++i)
12697 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
12698 return build_vector (type, vals);
12699 }
12700
12701 /* Constructor elements can be subvectors. */
12702 unsigned HOST_WIDE_INT k = 1;
12703 if (CONSTRUCTOR_NELTS (arg0) != 0)
12704 {
12705 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
12706 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
12707 k = TYPE_VECTOR_SUBPARTS (cons_elem);
12708 }
12709
12710 /* We keep an exact subset of the constructor elements. */
12711 if ((idx % k) == 0 && (n % k) == 0)
12712 {
12713 if (CONSTRUCTOR_NELTS (arg0) == 0)
12714 return build_constructor (type, NULL);
12715 idx /= k;
12716 n /= k;
12717 if (n == 1)
12718 {
12719 if (idx < CONSTRUCTOR_NELTS (arg0))
12720 return CONSTRUCTOR_ELT (arg0, idx)->value;
12721 return build_zero_cst (type);
12722 }
12723
12724 vec<constructor_elt, va_gc> *vals;
12725 vec_alloc (vals, n);
12726 for (unsigned i = 0;
12727 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
12728 ++i)
12729 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
12730 CONSTRUCTOR_ELT
12731 (arg0, idx + i)->value);
12732 return build_constructor (type, vals);
12733 }
12734 /* The bitfield references a single constructor element. */
12735 else if (idx + n <= (idx / k + 1) * k)
12736 {
12737 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
12738 return build_zero_cst (type);
12739 else if (n == k)
12740 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
12741 else
12742 return fold_build3_loc (loc, code, type,
12743 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
12744 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
12745 }
12746 }
12747 }
12748
12749 /* A bit-field-ref that references the full argument can be stripped. */
12750 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12751 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
12752 && integer_zerop (op2))
12753 return fold_convert_loc (loc, type, arg0);
12754
12755 /* On constants we can use native encode/interpret to constant
12756 fold (nearly) all BIT_FIELD_REFs. */
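/* For example, extracting 32 bits at bit offset 32 from a 64-bit
constant serializes the constant into a byte buffer and
reinterprets the 4 bytes starting at byte 4 in the result type. */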
12757 if (CONSTANT_CLASS_P (arg0)
12758 && can_native_interpret_type_p (type)
12759 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
12760 /* This limitation should not be necessary; we just need to
12761 round this up to mode size. */
12762 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
12763 /* Need bit-shifting of the buffer to relax the following. */
12764 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
12765 {
12766 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12767 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12768 unsigned HOST_WIDE_INT clen;
12769 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
12770 /* ??? We cannot tell native_encode_expr to start at
12771 some random byte only. So limit ourselves to a reasonable
12772 amount of work. */
12773 if (clen <= 4096)
12774 {
12775 unsigned char *b = XALLOCAVEC (unsigned char, clen);
12776 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
12777 if (len > 0
12778 && len * BITS_PER_UNIT >= bitpos + bitsize)
12779 {
12780 tree v = native_interpret_expr (type,
12781 b + bitpos / BITS_PER_UNIT,
12782 bitsize / BITS_PER_UNIT);
12783 if (v)
12784 return v;
12785 }
12786 }
12787 }
12788
12789 return NULL_TREE;
12790
12791 case FMA_EXPR:
12792 /* For integers we can decompose the FMA if possible. */
12793 if (TREE_CODE (arg0) == INTEGER_CST
12794 && TREE_CODE (arg1) == INTEGER_CST)
12795 return fold_build2_loc (loc, PLUS_EXPR, type,
12796 const_binop (MULT_EXPR, arg0, arg1), arg2);
12797 if (integer_zerop (arg2))
12798 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12799
12800 return fold_fma (loc, type, arg0, arg1, arg2);
12801
12802 case VEC_PERM_EXPR:
12803 if (TREE_CODE (arg2) == VECTOR_CST)
12804 {
12805 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
12806 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
12807 unsigned char *sel2 = sel + nelts;
12808 bool need_mask_canon = false;
12809 bool need_mask_canon2 = false;
12810 bool all_in_vec0 = true;
12811 bool all_in_vec1 = true;
12812 bool maybe_identity = true;
12813 bool single_arg = (op0 == op1);
12814 bool changed = false;
12815
12816 mask2 = 2 * nelts - 1;
12817 mask = single_arg ? (nelts - 1) : mask2;
12818 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
12819 for (i = 0; i < nelts; i++)
12820 {
12821 tree val = VECTOR_CST_ELT (arg2, i);
12822 if (TREE_CODE (val) != INTEGER_CST)
12823 return NULL_TREE;
12824
12825 /* Make sure that the perm value is in an acceptable
12826 range. */
12827 wide_int t = val;
12828 need_mask_canon |= wi::gtu_p (t, mask);
12829 need_mask_canon2 |= wi::gtu_p (t, mask2);
12830 sel[i] = t.to_uhwi () & mask;
12831 sel2[i] = t.to_uhwi () & mask2;
12832
12833 if (sel[i] < nelts)
12834 all_in_vec1 = false;
12835 else
12836 all_in_vec0 = false;
12837
12838 if ((sel[i] & (nelts-1)) != i)
12839 maybe_identity = false;
12840 }
12841
12842 if (maybe_identity)
12843 {
12844 if (all_in_vec0)
12845 return op0;
12846 if (all_in_vec1)
12847 return op1;
12848 }
12849
12850 if (all_in_vec0)
12851 op1 = op0;
12852 else if (all_in_vec1)
12853 {
12854 op0 = op1;
12855 for (i = 0; i < nelts; i++)
12856 sel[i] -= nelts;
12857 need_mask_canon = true;
12858 }
12859
12860 if ((TREE_CODE (op0) == VECTOR_CST
12861 || TREE_CODE (op0) == CONSTRUCTOR)
12862 && (TREE_CODE (op1) == VECTOR_CST
12863 || TREE_CODE (op1) == CONSTRUCTOR))
12864 {
12865 tree t = fold_vec_perm (type, op0, op1, sel);
12866 if (t != NULL_TREE)
12867 return t;
12868 }
12869
12870 if (op0 == op1 && !single_arg)
12871 changed = true;
12872
12873 /* Some targets are deficient and fail to expand a single
12874 argument permutation while still allowing an equivalent
12875 2-argument version. */
12876 if (need_mask_canon && arg2 == op2
12877 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
12878 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
12879 {
12880 need_mask_canon = need_mask_canon2;
12881 sel = sel2;
12882 }
12883
12884 if (need_mask_canon && arg2 == op2)
12885 {
12886 tree *tsel = XALLOCAVEC (tree, nelts);
12887 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
12888 for (i = 0; i < nelts; i++)
12889 tsel[i] = build_int_cst (eltype, sel[i]);
12890 op2 = build_vector (TREE_TYPE (arg2), tsel);
12891 changed = true;
12892 }
12893
12894 if (changed)
12895 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
12896 }
12897 return NULL_TREE;
12898
12899 default:
12900 return NULL_TREE;
12901 } /* switch (code) */
12902 }
12903
12904 /* Perform constant folding and related simplification of EXPR.
12905 The related simplifications include x*1 => x, x*0 => 0, etc.,
12906 and application of the associative law.
12907 NOP_EXPR conversions may be removed freely (as long as we
12908 are careful not to change the type of the overall expression).
12909 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12910 but we can constant-fold them if they have constant operands. */
12911
12912 #ifdef ENABLE_FOLD_CHECKING
12913 # define fold(x) fold_1 (x)
12914 static tree fold_1 (tree);
12915 static
12916 #endif
12917 tree
12918 fold (tree expr)
12919 {
12920 const tree t = expr;
12921 enum tree_code code = TREE_CODE (t);
12922 enum tree_code_class kind = TREE_CODE_CLASS (code);
12923 tree tem;
12924 location_t loc = EXPR_LOCATION (expr);
12925
12926 /* Return right away if a constant. */
12927 if (kind == tcc_constant)
12928 return t;
12929
12930 /* CALL_EXPR-like objects with variable numbers of operands are
12931 treated specially. */
12932 if (kind == tcc_vl_exp)
12933 {
12934 if (code == CALL_EXPR)
12935 {
12936 tem = fold_call_expr (loc, expr, false);
12937 return tem ? tem : expr;
12938 }
12939 return expr;
12940 }
12941
12942 if (IS_EXPR_CODE_CLASS (kind))
12943 {
12944 tree type = TREE_TYPE (t);
12945 tree op0, op1, op2;
12946
12947 switch (TREE_CODE_LENGTH (code))
12948 {
12949 case 1:
12950 op0 = TREE_OPERAND (t, 0);
12951 tem = fold_unary_loc (loc, code, type, op0);
12952 return tem ? tem : expr;
12953 case 2:
12954 op0 = TREE_OPERAND (t, 0);
12955 op1 = TREE_OPERAND (t, 1);
12956 tem = fold_binary_loc (loc, code, type, op0, op1);
12957 return tem ? tem : expr;
12958 case 3:
12959 op0 = TREE_OPERAND (t, 0);
12960 op1 = TREE_OPERAND (t, 1);
12961 op2 = TREE_OPERAND (t, 2);
12962 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12963 return tem ? tem : expr;
12964 default:
12965 break;
12966 }
12967 }
12968
12969 switch (code)
12970 {
12971 case ARRAY_REF:
12972 {
12973 tree op0 = TREE_OPERAND (t, 0);
12974 tree op1 = TREE_OPERAND (t, 1);
12975
12976 if (TREE_CODE (op1) == INTEGER_CST
12977 && TREE_CODE (op0) == CONSTRUCTOR
12978 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12979 {
12980 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
12981 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
12982 unsigned HOST_WIDE_INT begin = 0;
12983
12984 /* Find a matching index by means of a binary search. */
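/* CONSTRUCTOR elements are sorted by index, and an index may be
a RANGE_EXPR covering a span of elements, so compare OP1
against both range bounds. */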
12985 while (begin != end)
12986 {
12987 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
12988 tree index = (*elts)[middle].index;
12989
12990 if (TREE_CODE (index) == INTEGER_CST
12991 && tree_int_cst_lt (index, op1))
12992 begin = middle + 1;
12993 else if (TREE_CODE (index) == INTEGER_CST
12994 && tree_int_cst_lt (op1, index))
12995 end = middle;
12996 else if (TREE_CODE (index) == RANGE_EXPR
12997 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
12998 begin = middle + 1;
12999 else if (TREE_CODE (index) == RANGE_EXPR
13000 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13001 end = middle;
13002 else
13003 return (*elts)[middle].value;
13004 }
13005 }
13006
13007 return t;
13008 }
13009
13010 /* Return a VECTOR_CST if possible. */
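/* Vector elements in the constructor are flattened, and trailing
elements that are not given explicitly become zero. */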
13011 case CONSTRUCTOR:
13012 {
13013 tree type = TREE_TYPE (t);
13014 if (TREE_CODE (type) != VECTOR_TYPE)
13015 return t;
13016
13017 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13018 unsigned HOST_WIDE_INT idx, pos = 0;
13019 tree value;
13020
13021 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13022 {
13023 if (!CONSTANT_CLASS_P (value))
13024 return t;
13025 if (TREE_CODE (value) == VECTOR_CST)
13026 {
13027 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13028 vec[pos++] = VECTOR_CST_ELT (value, i);
13029 }
13030 else
13031 vec[pos++] = value;
13032 }
13033 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13034 vec[pos] = build_zero_cst (TREE_TYPE (type));
13035
13036 return build_vector (type, vec);
13037 }
13038
13039 case CONST_DECL:
13040 return fold (DECL_INITIAL (t));
13041
13042 default:
13043 return t;
13044 } /* switch (code) */
13045 }
13046
13047 #ifdef ENABLE_FOLD_CHECKING
13048 #undef fold
13049
13050 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13051 hash_table<nofree_ptr_hash<const tree_node> > *);
13052 static void fold_check_failed (const_tree, const_tree);
13053 void print_fold_checksum (const_tree);
13054
13055 /* When --enable-checking=fold, compute a digest of expr before
13056 and after the actual fold call to see whether fold accidentally
13057 changed the original expr. */
13058
13059 tree
13060 fold (tree expr)
13061 {
13062 tree ret;
13063 struct md5_ctx ctx;
13064 unsigned char checksum_before[16], checksum_after[16];
13065 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13066
13067 md5_init_ctx (&ctx);
13068 fold_checksum_tree (expr, &ctx, &ht);
13069 md5_finish_ctx (&ctx, checksum_before);
13070 ht.empty ();
13071
13072 ret = fold_1 (expr);
13073
13074 md5_init_ctx (&ctx);
13075 fold_checksum_tree (expr, &ctx, &ht);
13076 md5_finish_ctx (&ctx, checksum_after);
13077
13078 if (memcmp (checksum_before, checksum_after, 16))
13079 fold_check_failed (expr, ret);
13080
13081 return ret;
13082 }
13083
13084 void
13085 print_fold_checksum (const_tree expr)
13086 {
13087 struct md5_ctx ctx;
13088 unsigned char checksum[16], cnt;
13089 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13090
13091 md5_init_ctx (&ctx);
13092 fold_checksum_tree (expr, &ctx, &ht);
13093 md5_finish_ctx (&ctx, checksum);
13094 for (cnt = 0; cnt < 16; ++cnt)
13095 fprintf (stderr, "%02x", checksum[cnt]);
13096 putc ('\n', stderr);
13097 }
13098
13099 static void
13100 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13101 {
13102 internal_error ("fold check: original tree changed by fold");
13103 }
13104
13105 static void
13106 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13107 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13108 {
13109 const tree_node **slot;
13110 enum tree_code code;
13111 union tree_node buf;
13112 int i, len;
13113
13114 recursive_label:
13115 if (expr == NULL)
13116 return;
13117 slot = ht->find_slot (expr, INSERT);
13118 if (*slot != NULL)
13119 return;
13120 *slot = expr;
13121 code = TREE_CODE (expr);
13122 if (TREE_CODE_CLASS (code) == tcc_declaration
13123 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13124 {
13125 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
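/* Checksum a local copy with the mutable fields cleared, so that
legitimate changes to them between the "before" and "after"
digests do not count as corruption. */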
13126 memcpy ((char *) &buf, expr, tree_size (expr));
13127 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13128 buf.decl_with_vis.symtab_node = NULL;
13129 expr = (tree) &buf;
13130 }
13131 else if (TREE_CODE_CLASS (code) == tcc_type
13132 && (TYPE_POINTER_TO (expr)
13133 || TYPE_REFERENCE_TO (expr)
13134 || TYPE_CACHED_VALUES_P (expr)
13135 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13136 || TYPE_NEXT_VARIANT (expr)))
13137 {
13138 /* Allow these fields to be modified. */
13139 tree tmp;
13140 memcpy ((char *) &buf, expr, tree_size (expr));
13141 expr = tmp = (tree) &buf;
13142 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13143 TYPE_POINTER_TO (tmp) = NULL;
13144 TYPE_REFERENCE_TO (tmp) = NULL;
13145 TYPE_NEXT_VARIANT (tmp) = NULL;
13146 if (TYPE_CACHED_VALUES_P (tmp))
13147 {
13148 TYPE_CACHED_VALUES_P (tmp) = 0;
13149 TYPE_CACHED_VALUES (tmp) = NULL;
13150 }
13151 }
13152 md5_process_bytes (expr, tree_size (expr), ctx);
13153 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13154 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13155 if (TREE_CODE_CLASS (code) != tcc_type
13156 && TREE_CODE_CLASS (code) != tcc_declaration
13157 && code != TREE_LIST
13158 && code != SSA_NAME
13159 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13160 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13161 switch (TREE_CODE_CLASS (code))
13162 {
13163 case tcc_constant:
13164 switch (code)
13165 {
13166 case STRING_CST:
13167 md5_process_bytes (TREE_STRING_POINTER (expr),
13168 TREE_STRING_LENGTH (expr), ctx);
13169 break;
13170 case COMPLEX_CST:
13171 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13172 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13173 break;
13174 case VECTOR_CST:
13175 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
13176 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
13177 break;
13178 default:
13179 break;
13180 }
13181 break;
13182 case tcc_exceptional:
13183 switch (code)
13184 {
13185 case TREE_LIST:
13186 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13187 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13188 expr = TREE_CHAIN (expr);
13189 goto recursive_label;
13190 break;
13191 case TREE_VEC:
13192 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13193 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13194 break;
13195 default:
13196 break;
13197 }
13198 break;
13199 case tcc_expression:
13200 case tcc_reference:
13201 case tcc_comparison:
13202 case tcc_unary:
13203 case tcc_binary:
13204 case tcc_statement:
13205 case tcc_vl_exp:
13206 len = TREE_OPERAND_LENGTH (expr);
13207 for (i = 0; i < len; ++i)
13208 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13209 break;
13210 case tcc_declaration:
13211 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13212 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13213 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13214 {
13215 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13216 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13217 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13218 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13219 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13220 }
13221
13222 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13223 {
13224 if (TREE_CODE (expr) == FUNCTION_DECL)
13225 {
13226 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13227 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13228 }
13229 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13230 }
13231 break;
13232 case tcc_type:
13233 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13234 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13235 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13236 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13237 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13238 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13239 if (INTEGRAL_TYPE_P (expr)
13240 || SCALAR_FLOAT_TYPE_P (expr))
13241 {
13242 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13243 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13244 }
13245 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13246 if (TREE_CODE (expr) == RECORD_TYPE
13247 || TREE_CODE (expr) == UNION_TYPE
13248 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13249 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13250 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13251 break;
13252 default:
13253 break;
13254 }
13255 }
13256
13257 /* Helper function for outputting the checksum of a tree T. When
13258 debugging with gdb, you can "define mynext" to be "next" followed
13259 by "call debug_fold_checksum (op0)", then just trace down till the
13260 outputs differ. */
13261
13262 DEBUG_FUNCTION void
13263 debug_fold_checksum (const_tree t)
13264 {
13265 int i;
13266 unsigned char checksum[16];
13267 struct md5_ctx ctx;
13268 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13269
13270 md5_init_ctx (&ctx);
13271 fold_checksum_tree (t, &ctx, &ht);
13272 md5_finish_ctx (&ctx, checksum);
13273 ht.empty ();
13274
13275 for (i = 0; i < 16; i++)
13276 fprintf (stderr, "%d ", checksum[i]);
13277
13278 fprintf (stderr, "\n");
13279 }
13280
13281 #endif
13282
13283 /* Fold a unary tree expression with code CODE of type TYPE with an
13284 operand OP0. LOC is the location of the resulting expression.
13285 Return a folded expression if successful. Otherwise, return a tree
13286 expression with code CODE of type TYPE with an operand OP0. */
13287
13288 tree
13289 fold_build1_stat_loc (location_t loc,
13290 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13291 {
13292 tree tem;
13293 #ifdef ENABLE_FOLD_CHECKING
13294 unsigned char checksum_before[16], checksum_after[16];
13295 struct md5_ctx ctx;
13296 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13297
13298 md5_init_ctx (&ctx);
13299 fold_checksum_tree (op0, &ctx, &ht);
13300 md5_finish_ctx (&ctx, checksum_before);
13301 ht.empty ();
13302 #endif
13303
13304 tem = fold_unary_loc (loc, code, type, op0);
13305 if (!tem)
13306 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
13307
13308 #ifdef ENABLE_FOLD_CHECKING
13309 md5_init_ctx (&ctx);
13310 fold_checksum_tree (op0, &ctx, &ht);
13311 md5_finish_ctx (&ctx, checksum_after);
13312
13313 if (memcmp (checksum_before, checksum_after, 16))
13314 fold_check_failed (op0, tem);
13315 #endif
13316 return tem;
13317 }
13318
13319 /* Fold a binary tree expression with code CODE of type TYPE with
13320 operands OP0 and OP1. LOC is the location of the resulting
13321 expression. Return a folded expression if successful. Otherwise,
13322 return a tree expression with code CODE of type TYPE with operands
13323 OP0 and OP1. */
13324
13325 tree
13326 fold_build2_stat_loc (location_t loc,
13327 enum tree_code code, tree type, tree op0, tree op1
13328 MEM_STAT_DECL)
13329 {
13330 tree tem;
13331 #ifdef ENABLE_FOLD_CHECKING
13332 unsigned char checksum_before_op0[16],
13333 checksum_before_op1[16],
13334 checksum_after_op0[16],
13335 checksum_after_op1[16];
13336 struct md5_ctx ctx;
13337 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13338
13339 md5_init_ctx (&ctx);
13340 fold_checksum_tree (op0, &ctx, &ht);
13341 md5_finish_ctx (&ctx, checksum_before_op0);
13342 ht.empty ();
13343
13344 md5_init_ctx (&ctx);
13345 fold_checksum_tree (op1, &ctx, &ht);
13346 md5_finish_ctx (&ctx, checksum_before_op1);
13347 ht.empty ();
13348 #endif
13349
13350 tem = fold_binary_loc (loc, code, type, op0, op1);
13351 if (!tem)
13352 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13353
13354 #ifdef ENABLE_FOLD_CHECKING
13355 md5_init_ctx (&ctx);
13356 fold_checksum_tree (op0, &ctx, &ht);
13357 md5_finish_ctx (&ctx, checksum_after_op0);
13358 ht.empty ();
13359
13360 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13361 fold_check_failed (op0, tem);
13362
13363 md5_init_ctx (&ctx);
13364 fold_checksum_tree (op1, &ctx, &ht);
13365 md5_finish_ctx (&ctx, checksum_after_op1);
13366
13367 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13368 fold_check_failed (op1, tem);
13369 #endif
13370 return tem;
13371 }
13372
13373 /* Fold a ternary tree expression with code CODE of type TYPE with
13374 operands OP0, OP1, and OP2. Return a folded expression if
13375 successful. Otherwise, return a tree expression with code CODE of
13376 type TYPE with operands OP0, OP1, and OP2. */
13377
13378 tree
13379 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
13380 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13381 {
13382 tree tem;
13383 #ifdef ENABLE_FOLD_CHECKING
13384 unsigned char checksum_before_op0[16],
13385 checksum_before_op1[16],
13386 checksum_before_op2[16],
13387 checksum_after_op0[16],
13388 checksum_after_op1[16],
13389 checksum_after_op2[16];
13390 struct md5_ctx ctx;
13391 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13392
13393 md5_init_ctx (&ctx);
13394 fold_checksum_tree (op0, &ctx, &ht);
13395 md5_finish_ctx (&ctx, checksum_before_op0);
13396 ht.empty ();
13397
13398 md5_init_ctx (&ctx);
13399 fold_checksum_tree (op1, &ctx, &ht);
13400 md5_finish_ctx (&ctx, checksum_before_op1);
13401 ht.empty ();
13402
13403 md5_init_ctx (&ctx);
13404 fold_checksum_tree (op2, &ctx, &ht);
13405 md5_finish_ctx (&ctx, checksum_before_op2);
13406 ht.empty ();
13407 #endif
13408
13409 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13410 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13411 if (!tem)
13412 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13413
13414 #ifdef ENABLE_FOLD_CHECKING
13415 md5_init_ctx (&ctx);
13416 fold_checksum_tree (op0, &ctx, &ht);
13417 md5_finish_ctx (&ctx, checksum_after_op0);
13418 ht.empty ();
13419
13420 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13421 fold_check_failed (op0, tem);
13422
13423 md5_init_ctx (&ctx);
13424 fold_checksum_tree (op1, &ctx, &ht);
13425 md5_finish_ctx (&ctx, checksum_after_op1);
13426 ht.empty ();
13427
13428 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13429 fold_check_failed (op1, tem);
13430
13431 md5_init_ctx (&ctx);
13432 fold_checksum_tree (op2, &ctx, &ht);
13433 md5_finish_ctx (&ctx, checksum_after_op2);
13434
13435 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13436 fold_check_failed (op2, tem);
13437 #endif
13438 return tem;
13439 }
13440
13441 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
13442 arguments in ARGARRAY, and a null static chain.
13443 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13444 of type TYPE from the given operands as constructed by build_call_array. */
13445
13446 tree
13447 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13448 int nargs, tree *argarray)
13449 {
13450 tree tem;
13451 #ifdef ENABLE_FOLD_CHECKING
13452 unsigned char checksum_before_fn[16],
13453 checksum_before_arglist[16],
13454 checksum_after_fn[16],
13455 checksum_after_arglist[16];
13456 struct md5_ctx ctx;
13457 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13458 int i;
13459
13460 md5_init_ctx (&ctx);
13461 fold_checksum_tree (fn, &ctx, &ht);
13462 md5_finish_ctx (&ctx, checksum_before_fn);
13463 ht.empty ();
13464
13465 md5_init_ctx (&ctx);
13466 for (i = 0; i < nargs; i++)
13467 fold_checksum_tree (argarray[i], &ctx, &ht);
13468 md5_finish_ctx (&ctx, checksum_before_arglist);
13469 ht.empty ();
13470 #endif
13471
13472 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13473 if (!tem)
13474 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13475
13476 #ifdef ENABLE_FOLD_CHECKING
13477 md5_init_ctx (&ctx);
13478 fold_checksum_tree (fn, &ctx, &ht);
13479 md5_finish_ctx (&ctx, checksum_after_fn);
13480 ht.empty ();
13481
13482 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13483 fold_check_failed (fn, tem);
13484
13485 md5_init_ctx (&ctx);
13486 for (i = 0; i < nargs; i++)
13487 fold_checksum_tree (argarray[i], &ctx, &ht);
13488 md5_finish_ctx (&ctx, checksum_after_arglist);
13489
13490 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13491 fold_check_failed (NULL_TREE, tem);
13492 #endif
13493 return tem;
13494 }
13495
13496 /* Perform constant folding and related simplification of initializer
13497 expression EXPR. These behave identically to "fold_buildN" but ignore
13498 potential run-time traps and exceptions that fold must preserve. */
13499
13500 #define START_FOLD_INIT \
13501 int saved_signaling_nans = flag_signaling_nans;\
13502 int saved_trapping_math = flag_trapping_math;\
13503 int saved_rounding_math = flag_rounding_math;\
13504 int saved_trapv = flag_trapv;\
13505 int saved_folding_initializer = folding_initializer;\
13506 flag_signaling_nans = 0;\
13507 flag_trapping_math = 0;\
13508 flag_rounding_math = 0;\
13509 flag_trapv = 0;\
13510 folding_initializer = 1;
13511
13512 #define END_FOLD_INIT \
13513 flag_signaling_nans = saved_signaling_nans;\
13514 flag_trapping_math = saved_trapping_math;\
13515 flag_rounding_math = saved_rounding_math;\
13516 flag_trapv = saved_trapv;\
13517 folding_initializer = saved_folding_initializer;
13518
13519 tree
13520 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13521 tree type, tree op)
13522 {
13523 tree result;
13524 START_FOLD_INIT;
13525
13526 result = fold_build1_loc (loc, code, type, op);
13527
13528 END_FOLD_INIT;
13529 return result;
13530 }
13531
13532 tree
13533 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13534 tree type, tree op0, tree op1)
13535 {
13536 tree result;
13537 START_FOLD_INIT;
13538
13539 result = fold_build2_loc (loc, code, type, op0, op1);
13540
13541 END_FOLD_INIT;
13542 return result;
13543 }
13544
13545 tree
13546 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13547 int nargs, tree *argarray)
13548 {
13549 tree result;
13550 START_FOLD_INIT;
13551
13552 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13553
13554 END_FOLD_INIT;
13555 return result;
13556 }
13557
13558 #undef START_FOLD_INIT
13559 #undef END_FOLD_INIT
13560
13561 /* Determine if the first argument is a multiple of the second argument.
13562 Return 0 if it is not, or if we cannot easily determine it to be.
13563
13564 An example of the sort of thing we care about (at this point; this routine
13565 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13566 fold cases do now) is discovering that
13567
13568 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13569
13570 is a multiple of
13571
13572 SAVE_EXPR (J * 8)
13573
13574 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13575
13576 This code also handles discovering that
13577
13578 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13579
13580 is a multiple of 8 so we don't have to worry about dealing with a
13581 possible remainder.
13582
13583 Note that we *look* inside a SAVE_EXPR only to determine how it was
13584 calculated; it is not safe for fold to do much of anything else with the
13585 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13586 at run time. For instance, the second example above *cannot* be implemented
13587 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13588 evaluation time of the original SAVE_EXPR is not necessarily the same at
13589 the time the new expression is evaluated. The only optimization of this
13590 sort that would be valid is changing
13591
13592 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13593
13594 divided by 8 to
13595
13596 SAVE_EXPR (I) * SAVE_EXPR (J)
13597
13598 (where the same SAVE_EXPR (J) is used in the original and the
13599 transformed version). */
13600
13601 int
13602 multiple_of_p (tree type, const_tree top, const_tree bottom)
13603 {
13604 if (operand_equal_p (top, bottom, 0))
13605 return 1;
13606
13607 if (TREE_CODE (type) != INTEGER_TYPE)
13608 return 0;
13609
13610 switch (TREE_CODE (top))
13611 {
13612 case BIT_AND_EXPR:
13613 /* Bitwise and provides a power of two multiple. If the mask is
13614 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13615 if (!integer_pow2p (bottom))
13616 return 0;
13617 /* FALLTHRU */
13618
13619 case MULT_EXPR:
13620 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13621 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13622
13623 case PLUS_EXPR:
13624 case MINUS_EXPR:
13625 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13626 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13627
13628 case LSHIFT_EXPR:
13629 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13630 {
13631 tree op1, t1;
13632
13633 op1 = TREE_OPERAND (top, 1);
13634 /* const_binop may not detect overflow correctly,
13635 so check for it explicitly here. */
13636 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
13637 && 0 != (t1 = fold_convert (type,
13638 const_binop (LSHIFT_EXPR,
13639 size_one_node,
13640 op1)))
13641 && !TREE_OVERFLOW (t1))
13642 return multiple_of_p (type, t1, bottom);
13643 }
13644 return 0;
13645
13646 case NOP_EXPR:
13647 /* Can't handle conversions from non-integral or wider integral type. */
13648 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13649 || (TYPE_PRECISION (type)
13650 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13651 return 0;
13652
13653 /* ... fall through ... */
13654
13655 case SAVE_EXPR:
13656 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13657
13658 case COND_EXPR:
13659 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13660 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13661
13662 case INTEGER_CST:
13663 if (TREE_CODE (bottom) != INTEGER_CST
13664 || integer_zerop (bottom)
13665 || (TYPE_UNSIGNED (type)
13666 && (tree_int_cst_sgn (top) < 0
13667 || tree_int_cst_sgn (bottom) < 0)))
13668 return 0;
13669 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13670 SIGNED);
13671
13672 default:
13673 return 0;
13674 }
13675 }
13676
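/* A minimal sketch, not part of GCC itself (the helper name is
   illustrative): I * 8 + 16 is a multiple of 8, because the PLUS_EXPR
   case needs both operands to be multiples while the MULT_EXPR case
   needs only one.  */

static bool
example_is_multiple_of_eight (tree i)
{
  tree eight = build_int_cst (sizetype, 8);
  tree t = fold_build2 (PLUS_EXPR, sizetype,
			fold_build2 (MULT_EXPR, sizetype,
				     fold_convert (sizetype, i), eight),
			build_int_cst (sizetype, 16));
  return multiple_of_p (sizetype, t, eight) != 0;
}
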
13677 /* Return true if an expression of code CODE and type TYPE is known to be non-negative. */
13678
13679 static bool
13680 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13681 {
13682 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13683 && truth_value_p (code))
13684 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13685 have a signed:1 type (where the values are -1 and 0). */
13686 return true;
13687 return false;
13688 }
13689
13690 /* Return true if (CODE OP0) is known to be non-negative. If the return
13691 value is based on the assumption that signed overflow is undefined,
13692 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13693 *STRICT_OVERFLOW_P. */
13694
13695 bool
13696 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13697 bool *strict_overflow_p)
13698 {
13699 if (TYPE_UNSIGNED (type))
13700 return true;
13701
13702 switch (code)
13703 {
13704 case ABS_EXPR:
13705 /* We can't return 1 if flag_wrapv is set because
13706 ABS_EXPR<INT_MIN> = INT_MIN. */
13707 if (!ANY_INTEGRAL_TYPE_P (type))
13708 return true;
13709 if (TYPE_OVERFLOW_UNDEFINED (type))
13710 {
13711 *strict_overflow_p = true;
13712 return true;
13713 }
13714 break;
13715
13716 case NON_LVALUE_EXPR:
13717 case FLOAT_EXPR:
13718 case FIX_TRUNC_EXPR:
13719 return tree_expr_nonnegative_warnv_p (op0,
13720 strict_overflow_p);
13721
13722 CASE_CONVERT:
13723 {
13724 tree inner_type = TREE_TYPE (op0);
13725 tree outer_type = type;
13726
13727 if (TREE_CODE (outer_type) == REAL_TYPE)
13728 {
13729 if (TREE_CODE (inner_type) == REAL_TYPE)
13730 return tree_expr_nonnegative_warnv_p (op0,
13731 strict_overflow_p);
13732 if (INTEGRAL_TYPE_P (inner_type))
13733 {
13734 if (TYPE_UNSIGNED (inner_type))
13735 return true;
13736 return tree_expr_nonnegative_warnv_p (op0,
13737 strict_overflow_p);
13738 }
13739 }
13740 else if (INTEGRAL_TYPE_P (outer_type))
13741 {
13742 if (TREE_CODE (inner_type) == REAL_TYPE)
13743 return tree_expr_nonnegative_warnv_p (op0,
13744 strict_overflow_p);
13745 if (INTEGRAL_TYPE_P (inner_type))
13746 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13747 && TYPE_UNSIGNED (inner_type);
13748 }
13749 }
13750 break;
13751
13752 default:
13753 return tree_simple_nonnegative_warnv_p (code, type);
13754 }
13755
13756 /* We don't know the sign of the expression, so be conservative and return false. */
13757 return false;
13758 }
13759
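/* A minimal sketch, not part of GCC itself (the helper name is
   illustrative, and OP0 is assumed to have type int): ABS_EXPR of a
   signed int is reported non-negative only under the
   undefined-overflow assumption, because with -fwrapv
   ABS_EXPR<INT_MIN> is still INT_MIN.  */

static bool
example_abs_is_nonnegative (tree op0)
{
  bool strict_overflow_p = false;
  bool ret = tree_unary_nonnegative_warnv_p (ABS_EXPR, integer_type_node,
					     op0, &strict_overflow_p);
  /* When RET is true here, STRICT_OVERFLOW_P records that the answer
     relies on signed overflow being undefined.  */
  return ret;
}
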
13760 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13761 value is based on the assumption that signed overflow is undefined,
13762 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13763 *STRICT_OVERFLOW_P. */
13764
13765 bool
13766 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13767 tree op1, bool *strict_overflow_p)
13768 {
13769 if (TYPE_UNSIGNED (type))
13770 return true;
13771
13772 switch (code)
13773 {
13774 case POINTER_PLUS_EXPR:
13775 case PLUS_EXPR:
13776 if (FLOAT_TYPE_P (type))
13777 return (tree_expr_nonnegative_warnv_p (op0,
13778 strict_overflow_p)
13779 && tree_expr_nonnegative_warnv_p (op1,
13780 strict_overflow_p));
13781
13782 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13783 both unsigned and at least 2 bits shorter than the result. */
13784 if (TREE_CODE (type) == INTEGER_TYPE
13785 && TREE_CODE (op0) == NOP_EXPR
13786 && TREE_CODE (op1) == NOP_EXPR)
13787 {
13788 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13789 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13790 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13791 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13792 {
13793 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13794 TYPE_PRECISION (inner2)) + 1;
13795 return prec < TYPE_PRECISION (type);
13796 }
13797 }
13798 break;
13799
13800 case MULT_EXPR:
13801 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13802 {
13803 /* x * x is always non-negative for floating point x
13804 or without overflow. */
13805 if (operand_equal_p (op0, op1, 0)
13806 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
13807 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
13808 {
13809 if (ANY_INTEGRAL_TYPE_P (type)
13810 && TYPE_OVERFLOW_UNDEFINED (type))
13811 *strict_overflow_p = true;
13812 return true;
13813 }
13814 }
13815
13816 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13817 both unsigned and their combined precision is less than that of the result. */
13818 if (TREE_CODE (type) == INTEGER_TYPE
13819 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13820 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13821 {
13822 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13823 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13824 : TREE_TYPE (op0);
13825 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13826 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13827 : TREE_TYPE (op1);
13828
13829 bool unsigned0 = TYPE_UNSIGNED (inner0);
13830 bool unsigned1 = TYPE_UNSIGNED (inner1);
13831
13832 if (TREE_CODE (op0) == INTEGER_CST)
13833 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13834
13835 if (TREE_CODE (op1) == INTEGER_CST)
13836 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13837
13838 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13839 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13840 {
13841 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13842 ? tree_int_cst_min_precision (op0, UNSIGNED)
13843 : TYPE_PRECISION (inner0);
13844
13845 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13846 ? tree_int_cst_min_precision (op1, UNSIGNED)
13847 : TYPE_PRECISION (inner1);
13848
13849 return precision0 + precision1 < TYPE_PRECISION (type);
13850 }
13851 }
13852 return false;
13853
13854 case BIT_AND_EXPR:
13855 case MAX_EXPR:
13856 return (tree_expr_nonnegative_warnv_p (op0,
13857 strict_overflow_p)
13858 || tree_expr_nonnegative_warnv_p (op1,
13859 strict_overflow_p));
13860
13861 case BIT_IOR_EXPR:
13862 case BIT_XOR_EXPR:
13863 case MIN_EXPR:
13864 case RDIV_EXPR:
13865 case TRUNC_DIV_EXPR:
13866 case CEIL_DIV_EXPR:
13867 case FLOOR_DIV_EXPR:
13868 case ROUND_DIV_EXPR:
13869 return (tree_expr_nonnegative_warnv_p (op0,
13870 strict_overflow_p)
13871 && tree_expr_nonnegative_warnv_p (op1,
13872 strict_overflow_p));
13873
13874 case TRUNC_MOD_EXPR:
13875 case CEIL_MOD_EXPR:
13876 case FLOOR_MOD_EXPR:
13877 case ROUND_MOD_EXPR:
13878 return tree_expr_nonnegative_warnv_p (op0,
13879 strict_overflow_p);
13880 default:
13881 return tree_simple_nonnegative_warnv_p (code, type);
13882 }
13883
13884 /* We don't know the sign of the expression, so be conservative and return false. */
13885 return false;
13886 }
13887
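/* A minimal sketch, not part of GCC itself (the helper name is
   illustrative): X * X is recognized as non-negative; for a signed
   integral X, *STRICT_OVERFLOW_P records the undefined-overflow
   assumption behind that answer.  */

static bool
example_square_is_nonnegative (tree x)
{
  bool strict_overflow_p = false;
  return tree_binary_nonnegative_warnv_p (MULT_EXPR, TREE_TYPE (x),
					  x, x, &strict_overflow_p);
}
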
13888 /* Return true if T is known to be non-negative. If the return
13889 value is based on the assumption that signed overflow is undefined,
13890 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13891 *STRICT_OVERFLOW_P. */
13892
13893 bool
13894 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13895 {
13896 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13897 return true;
13898
13899 switch (TREE_CODE (t))
13900 {
13901 case INTEGER_CST:
13902 return tree_int_cst_sgn (t) >= 0;
13903
13904 case REAL_CST:
13905 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13906
13907 case FIXED_CST:
13908 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13909
13910 case COND_EXPR:
13911 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13912 strict_overflow_p)
13913 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13914 strict_overflow_p));
13915 default:
13916 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
13917 TREE_TYPE (t));
13918 }
13919 /* We don't know the sign of `t', so be conservative and return false. */
13920 return false;
13921 }
13922
13923 /* Return true if T is known to be non-negative. If the return
13924 value is based on the assumption that signed overflow is undefined,
13925 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13926 *STRICT_OVERFLOW_P. */
13927
13928 bool
13929 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
13930 tree arg0, tree arg1, bool *strict_overflow_p)
13931 {
13932 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13933 switch (DECL_FUNCTION_CODE (fndecl))
13934 {
13935 CASE_FLT_FN (BUILT_IN_ACOS):
13936 CASE_FLT_FN (BUILT_IN_ACOSH):
13937 CASE_FLT_FN (BUILT_IN_CABS):
13938 CASE_FLT_FN (BUILT_IN_COSH):
13939 CASE_FLT_FN (BUILT_IN_ERFC):
13940 CASE_FLT_FN (BUILT_IN_EXP):
13941 CASE_FLT_FN (BUILT_IN_EXP10):
13942 CASE_FLT_FN (BUILT_IN_EXP2):
13943 CASE_FLT_FN (BUILT_IN_FABS):
13944 CASE_FLT_FN (BUILT_IN_FDIM):
13945 CASE_FLT_FN (BUILT_IN_HYPOT):
13946 CASE_FLT_FN (BUILT_IN_POW10):
13947 CASE_INT_FN (BUILT_IN_FFS):
13948 CASE_INT_FN (BUILT_IN_PARITY):
13949 CASE_INT_FN (BUILT_IN_POPCOUNT):
13950 CASE_INT_FN (BUILT_IN_CLZ):
13951 CASE_INT_FN (BUILT_IN_CLRSB):
13952 case BUILT_IN_BSWAP32:
13953 case BUILT_IN_BSWAP64:
13954 /* Always true. */
13955 return true;
13956
13957 CASE_FLT_FN (BUILT_IN_SQRT):
13958 /* sqrt(-0.0) is -0.0. */
13959 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13960 return true;
13961 return tree_expr_nonnegative_warnv_p (arg0,
13962 strict_overflow_p);
13963
13964 CASE_FLT_FN (BUILT_IN_ASINH):
13965 CASE_FLT_FN (BUILT_IN_ATAN):
13966 CASE_FLT_FN (BUILT_IN_ATANH):
13967 CASE_FLT_FN (BUILT_IN_CBRT):
13968 CASE_FLT_FN (BUILT_IN_CEIL):
13969 CASE_FLT_FN (BUILT_IN_ERF):
13970 CASE_FLT_FN (BUILT_IN_EXPM1):
13971 CASE_FLT_FN (BUILT_IN_FLOOR):
13972 CASE_FLT_FN (BUILT_IN_FMOD):
13973 CASE_FLT_FN (BUILT_IN_FREXP):
13974 CASE_FLT_FN (BUILT_IN_ICEIL):
13975 CASE_FLT_FN (BUILT_IN_IFLOOR):
13976 CASE_FLT_FN (BUILT_IN_IRINT):
13977 CASE_FLT_FN (BUILT_IN_IROUND):
13978 CASE_FLT_FN (BUILT_IN_LCEIL):
13979 CASE_FLT_FN (BUILT_IN_LDEXP):
13980 CASE_FLT_FN (BUILT_IN_LFLOOR):
13981 CASE_FLT_FN (BUILT_IN_LLCEIL):
13982 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13983 CASE_FLT_FN (BUILT_IN_LLRINT):
13984 CASE_FLT_FN (BUILT_IN_LLROUND):
13985 CASE_FLT_FN (BUILT_IN_LRINT):
13986 CASE_FLT_FN (BUILT_IN_LROUND):
13987 CASE_FLT_FN (BUILT_IN_MODF):
13988 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13989 CASE_FLT_FN (BUILT_IN_RINT):
13990 CASE_FLT_FN (BUILT_IN_ROUND):
13991 CASE_FLT_FN (BUILT_IN_SCALB):
13992 CASE_FLT_FN (BUILT_IN_SCALBLN):
13993 CASE_FLT_FN (BUILT_IN_SCALBN):
13994 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13995 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13996 CASE_FLT_FN (BUILT_IN_SINH):
13997 CASE_FLT_FN (BUILT_IN_TANH):
13998 CASE_FLT_FN (BUILT_IN_TRUNC):
13999 /* True if the 1st argument is nonnegative. */
14000 return tree_expr_nonnegative_warnv_p (arg0,
14001 strict_overflow_p);
14002
14003 CASE_FLT_FN (BUILT_IN_FMAX):
14004 /* True if the 1st OR 2nd arguments are nonnegative. */
14005 return (tree_expr_nonnegative_warnv_p (arg0,
14006 strict_overflow_p)
14007 || (tree_expr_nonnegative_warnv_p (arg1,
14008 strict_overflow_p)));
14009
14010 CASE_FLT_FN (BUILT_IN_FMIN):
14011 /* True if the 1st AND 2nd arguments are nonnegative. */
14012 return (tree_expr_nonnegative_warnv_p (arg0,
14013 strict_overflow_p)
14014 && (tree_expr_nonnegative_warnv_p (arg1,
14015 strict_overflow_p)));
14016
14017 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14018 /* True if the 2nd argument is nonnegative. */
14019 return tree_expr_nonnegative_warnv_p (arg1,
14020 strict_overflow_p);
14021
14022 CASE_FLT_FN (BUILT_IN_POWI):
14023 /* True if the 1st argument is nonnegative or the second
14024 argument is an even integer. */
14025 if (TREE_CODE (arg1) == INTEGER_CST
14026 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14027 return true;
14028 return tree_expr_nonnegative_warnv_p (arg0,
14029 strict_overflow_p);
14030
14031 CASE_FLT_FN (BUILT_IN_POW):
14032 /* True if the 1st argument is nonnegative or the second
14033 argument is an even integer-valued real. */
14034 if (TREE_CODE (arg1) == REAL_CST)
14035 {
14036 REAL_VALUE_TYPE c;
14037 HOST_WIDE_INT n;
14038
14039 c = TREE_REAL_CST (arg1);
14040 n = real_to_integer (&c);
14041 if ((n & 1) == 0)
14042 {
14043 REAL_VALUE_TYPE cint;
14044 real_from_integer (&cint, VOIDmode, n, SIGNED);
14045 if (real_identical (&c, &cint))
14046 return true;
14047 }
14048 }
14049 return tree_expr_nonnegative_warnv_p (arg0,
14050 strict_overflow_p);
14051
14052 default:
14053 break;
14054 }
14055 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14056 type);
14057 }
14058
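/* A minimal sketch, not part of GCC itself (the helper name is
   illustrative): a call such as pow (x, 2.0) is known non-negative
   because the exponent is an even integer-valued REAL_CST, whatever
   the sign of X.  */

static bool
example_call_is_nonnegative (tree call)
{
  if (TREE_CODE (call) != CALL_EXPR || call_expr_nargs (call) != 2)
    return false;
  bool strict_overflow_p = false;
  return tree_call_nonnegative_warnv_p (TREE_TYPE (call),
					get_callee_fndecl (call),
					CALL_EXPR_ARG (call, 0),
					CALL_EXPR_ARG (call, 1),
					&strict_overflow_p);
}
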
14059 /* Return true if T is known to be non-negative. If the return
14060 value is based on the assumption that signed overflow is undefined,
14061 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14062 *STRICT_OVERFLOW_P. */
14063
14064 static bool
14065 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14066 {
14067 enum tree_code code = TREE_CODE (t);
14068 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14069 return true;
14070
14071 switch (code)
14072 {
14073 case TARGET_EXPR:
14074 {
14075 tree temp = TARGET_EXPR_SLOT (t);
14076 t = TARGET_EXPR_INITIAL (t);
14077
14078 /* If the initializer is non-void, then it's a normal expression
14079 that will be assigned to the slot. */
14080 if (!VOID_TYPE_P (t))
14081 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14082
14083 /* Otherwise, the initializer sets the slot in some way. One common
14084 way is an assignment statement at the end of the initializer. */
14085 while (1)
14086 {
14087 if (TREE_CODE (t) == BIND_EXPR)
14088 t = expr_last (BIND_EXPR_BODY (t));
14089 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14090 || TREE_CODE (t) == TRY_CATCH_EXPR)
14091 t = expr_last (TREE_OPERAND (t, 0));
14092 else if (TREE_CODE (t) == STATEMENT_LIST)
14093 t = expr_last (t);
14094 else
14095 break;
14096 }
14097 if (TREE_CODE (t) == MODIFY_EXPR
14098 && TREE_OPERAND (t, 0) == temp)
14099 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14100 strict_overflow_p);
14101
14102 return false;
14103 }
14104
14105 case CALL_EXPR:
14106 {
14107 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14108 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14109
14110 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14111 get_callee_fndecl (t),
14112 arg0,
14113 arg1,
14114 strict_overflow_p);
14115 }
14116 case COMPOUND_EXPR:
14117 case MODIFY_EXPR:
14118 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14119 strict_overflow_p);
14120 case BIND_EXPR:
14121 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14122 strict_overflow_p);
14123 case SAVE_EXPR:
14124 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14125 strict_overflow_p);
14126
14127 default:
14128 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14129 TREE_TYPE (t));
14130 }
14131
14132 /* We don't know the sign of `t', so be conservative and return false. */
14133 return false;
14134 }
14135
14136 /* Return true if T is known to be non-negative. If the return
14137 value is based on the assumption that signed overflow is undefined,
14138 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14139 *STRICT_OVERFLOW_P. */
14140
14141 bool
14142 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14143 {
14144 enum tree_code code;
14145 if (t == error_mark_node)
14146 return false;
14147
14148 code = TREE_CODE (t);
14149 switch (TREE_CODE_CLASS (code))
14150 {
14151 case tcc_binary:
14152 case tcc_comparison:
14153 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14154 TREE_TYPE (t),
14155 TREE_OPERAND (t, 0),
14156 TREE_OPERAND (t, 1),
14157 strict_overflow_p);
14158
14159 case tcc_unary:
14160 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14161 TREE_TYPE (t),
14162 TREE_OPERAND (t, 0),
14163 strict_overflow_p);
14164
14165 case tcc_constant:
14166 case tcc_declaration:
14167 case tcc_reference:
14168 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14169
14170 default:
14171 break;
14172 }
14173
14174 switch (code)
14175 {
14176 case TRUTH_AND_EXPR:
14177 case TRUTH_OR_EXPR:
14178 case TRUTH_XOR_EXPR:
14179 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14180 TREE_TYPE (t),
14181 TREE_OPERAND (t, 0),
14182 TREE_OPERAND (t, 1),
14183 strict_overflow_p);
14184 case TRUTH_NOT_EXPR:
14185 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14186 TREE_TYPE (t),
14187 TREE_OPERAND (t, 0),
14188 strict_overflow_p);
14189
14190 case COND_EXPR:
14191 case CONSTRUCTOR:
14192 case OBJ_TYPE_REF:
14193 case ASSERT_EXPR:
14194 case ADDR_EXPR:
14195 case WITH_SIZE_EXPR:
14196 case SSA_NAME:
14197 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14198
14199 default:
14200 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14201 }
14202 }
14203
14204 /* Return true if `t' is known to be non-negative. Handle warnings
14205 about undefined signed overflow. */
14206
14207 bool
14208 tree_expr_nonnegative_p (tree t)
14209 {
14210 bool ret, strict_overflow_p;
14211
14212 strict_overflow_p = false;
14213 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14214 if (strict_overflow_p)
14215 fold_overflow_warning (("assuming signed overflow does not occur when "
14216 "determining that expression is always "
14217 "non-negative"),
14218 WARN_STRICT_OVERFLOW_MISC);
14219 return ret;
14220 }
14221
14222
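/* A minimal sketch, not part of GCC itself (the helper name is
   illustrative): dropping a redundant ABS_EXPR.  Using this _p wrapper
   rather than the _warnv_p worker means the -Wstrict-overflow warning
   is emitted automatically when the answer relies on undefined signed
   overflow.  */

static tree
example_drop_redundant_abs (tree t)
{
  if (TREE_CODE (t) == ABS_EXPR
      && tree_expr_nonnegative_p (TREE_OPERAND (t, 0)))
    return TREE_OPERAND (t, 0);
  return t;
}
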
14223 /* Return true when (CODE OP0) is known to be nonzero. For floating
14224 point we further ensure that the value is not denormal.
14225 Similar logic is present in nonzero_address in rtlanal.c.
14226
14227 If the return value is based on the assumption that signed overflow
14228 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14229 change *STRICT_OVERFLOW_P. */
14230
14231 bool
14232 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14233 bool *strict_overflow_p)
14234 {
14235 switch (code)
14236 {
14237 case ABS_EXPR:
14238 return tree_expr_nonzero_warnv_p (op0,
14239 strict_overflow_p);
14240
14241 case NOP_EXPR:
14242 {
14243 tree inner_type = TREE_TYPE (op0);
14244 tree outer_type = type;
14245
14246 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14247 && tree_expr_nonzero_warnv_p (op0,
14248 strict_overflow_p));
14249 }
14250 break;
14251
14252 case NON_LVALUE_EXPR:
14253 return tree_expr_nonzero_warnv_p (op0,
14254 strict_overflow_p);
14255
14256 default:
14257 break;
14258 }
14259
14260 return false;
14261 }
14262
14263 /* Return true when (CODE OP0 OP1) is known to be nonzero. For floating
14264 point we further ensure that the value is not denormal.
14265 Similar logic is present in nonzero_address in rtlanal.c.
14266
14267 If the return value is based on the assumption that signed overflow
14268 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14269 change *STRICT_OVERFLOW_P. */
14270
14271 bool
14272 tree_binary_nonzero_warnv_p (enum tree_code code,
14273 tree type,
14274 tree op0,
14275 tree op1, bool *strict_overflow_p)
14276 {
14277 bool sub_strict_overflow_p;
14278 switch (code)
14279 {
14280 case POINTER_PLUS_EXPR:
14281 case PLUS_EXPR:
14282 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14283 {
14284 /* In the presence of negative values it is hard
14285 to say anything definite. */
14286 sub_strict_overflow_p = false;
14287 if (!tree_expr_nonnegative_warnv_p (op0,
14288 &sub_strict_overflow_p)
14289 || !tree_expr_nonnegative_warnv_p (op1,
14290 &sub_strict_overflow_p))
14291 return false;
14292 /* One of the operands must be positive and the other non-negative. */
14293 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14294 overflows, on a twos-complement machine the sum of two
14295 nonnegative numbers can never be zero. */
14296 return (tree_expr_nonzero_warnv_p (op0,
14297 strict_overflow_p)
14298 || tree_expr_nonzero_warnv_p (op1,
14299 strict_overflow_p));
14300 }
14301 break;
14302
14303 case MULT_EXPR:
14304 if (TYPE_OVERFLOW_UNDEFINED (type))
14305 {
14306 if (tree_expr_nonzero_warnv_p (op0,
14307 strict_overflow_p)
14308 && tree_expr_nonzero_warnv_p (op1,
14309 strict_overflow_p))
14310 {
14311 *strict_overflow_p = true;
14312 return true;
14313 }
14314 }
14315 break;
14316
14317 case MIN_EXPR:
14318 sub_strict_overflow_p = false;
14319 if (tree_expr_nonzero_warnv_p (op0,
14320 &sub_strict_overflow_p)
14321 && tree_expr_nonzero_warnv_p (op1,
14322 &sub_strict_overflow_p))
14323 {
14324 if (sub_strict_overflow_p)
14325 *strict_overflow_p = true;
/* MIN_EXPR evaluates to one of its operands, so if both are
nonzero the minimum is nonzero as well. */
return true;
14326 }
14327 break;
14328
14329 case MAX_EXPR:
14330 sub_strict_overflow_p = false;
14331 if (tree_expr_nonzero_warnv_p (op0,
14332 &sub_strict_overflow_p))
14333 {
14334 if (sub_strict_overflow_p)
14335 *strict_overflow_p = true;
14336
14337 /* When both operands are nonzero, then MAX must be too. */
14338 if (tree_expr_nonzero_warnv_p (op1,
14339 strict_overflow_p))
14340 return true;
14341
14342 /* MAX where operand 0 is positive is positive. */
14343 return tree_expr_nonnegative_warnv_p (op0,
14344 strict_overflow_p);
14345 }
14346 /* MAX where operand 1 is positive is positive. */
14347 else if (tree_expr_nonzero_warnv_p (op1,
14348 &sub_strict_overflow_p)
14349 && tree_expr_nonnegative_warnv_p (op1,
14350 &sub_strict_overflow_p))
14351 {
14352 if (sub_strict_overflow_p)
14353 *strict_overflow_p = true;
14354 return true;
14355 }
14356 break;
14357
14358 case BIT_IOR_EXPR:
14359 return (tree_expr_nonzero_warnv_p (op1,
14360 strict_overflow_p)
14361 || tree_expr_nonzero_warnv_p (op0,
14362 strict_overflow_p));
14363
14364 default:
14365 break;
14366 }
14367
14368 return false;
14369 }
14370
14371 /* Return true when T is an address and is known to be nonzero.
14372 For floating point we further ensure that T is not denormal.
14373 Similar logic is present in nonzero_address in rtlanal.c.
14374
14375 If the return value is based on the assumption that signed overflow
14376 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14377 change *STRICT_OVERFLOW_P. */
14378
14379 bool
14380 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14381 {
14382 bool sub_strict_overflow_p;
14383 switch (TREE_CODE (t))
14384 {
14385 case INTEGER_CST:
14386 return !integer_zerop (t);
14387
14388 case ADDR_EXPR:
14389 {
14390 tree base = TREE_OPERAND (t, 0);
14391
14392 if (!DECL_P (base))
14393 base = get_base_address (base);
14394
14395 if (!base)
14396 return false;
14397
14398 /* For objects in symbol table check if we know they are non-zero.
14399 Don't do anything for variables and functions before symtab is built;
14400 it is quite possible that they will be declared weak later. */
14401 if (DECL_P (base) && decl_in_symtab_p (base))
14402 {
14403 struct symtab_node *symbol;
14404
14405 symbol = symtab_node::get_create (base);
14406 if (symbol)
14407 return symbol->nonzero_address ();
14408 else
14409 return false;
14410 }
14411
14412 /* Function local objects are never NULL. */
14413 if (DECL_P (base)
14414 && (DECL_CONTEXT (base)
14415 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
14416 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
14417 return true;
14418
14419 /* Constants are never weak. */
14420 if (CONSTANT_CLASS_P (base))
14421 return true;
14422
14423 return false;
14424 }
14425
14426 case COND_EXPR:
14427 sub_strict_overflow_p = false;
14428 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14429 &sub_strict_overflow_p)
14430 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14431 &sub_strict_overflow_p))
14432 {
14433 if (sub_strict_overflow_p)
14434 *strict_overflow_p = true;
14435 return true;
14436 }
14437 break;
14438
14439 default:
14440 break;
14441 }
14442 return false;
14443 }
14444
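/* A minimal sketch, not part of GCC itself (the helper name is
   illustrative): the address of a function-local variable or of a
   constant is known nonzero; for symbols in the symbol table the
   answer comes from symtab_node::nonzero_address.  */

static bool
example_address_is_nonzero (tree addr)
{
  bool strict_overflow_p = false;
  return tree_single_nonzero_warnv_p (addr, &strict_overflow_p);
}
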
14445 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14446 attempt to fold the expression to a constant without modifying TYPE,
14447 OP0 or OP1.
14448
14449 If the expression could be simplified to a constant, then return
14450 the constant. If the expression would not be simplified to a
14451 constant, then return NULL_TREE. */
14452
14453 tree
14454 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14455 {
14456 tree tem = fold_binary (code, type, op0, op1);
14457 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14458 }
14459
14460 /* Given the components of a unary expression CODE, TYPE and OP0,
14461 attempt to fold the expression to a constant without modifying
14462 TYPE or OP0.
14463
14464 If the expression could be simplified to a constant, then return
14465 the constant. If the expression would not be simplified to a
14466 constant, then return NULL_TREE. */
14467
14468 tree
14469 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14470 {
14471 tree tem = fold_unary (code, type, op0);
14472 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14473 }
14474
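/* A minimal sketch, not part of GCC itself (the helper name is
   illustrative): fold_binary_to_constant either returns a constant or
   NULL_TREE, never a partially simplified tree.  */

static tree
example_fold_product_to_constant (void)
{
  tree four = build_int_cst (integer_type_node, 4);
  tree five = build_int_cst (integer_type_node, 5);
  /* Folds 4 * 5 to the INTEGER_CST 20; with a non-constant operand
     it would return NULL_TREE instead.  */
  return fold_binary_to_constant (MULT_EXPR, integer_type_node,
				  four, five);
}
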
14475 /* If EXP represents referencing an element in a constant string
14476 (either via pointer arithmetic or array indexing), return the
14477 tree representing the value accessed, otherwise return NULL. */
14478
14479 tree
14480 fold_read_from_constant_string (tree exp)
14481 {
14482 if ((TREE_CODE (exp) == INDIRECT_REF
14483 || TREE_CODE (exp) == ARRAY_REF)
14484 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14485 {
14486 tree exp1 = TREE_OPERAND (exp, 0);
14487 tree index;
14488 tree string;
14489 location_t loc = EXPR_LOCATION (exp);
14490
14491 if (TREE_CODE (exp) == INDIRECT_REF)
14492 string = string_constant (exp1, &index);
14493 else
14494 {
14495 tree low_bound = array_ref_low_bound (exp);
14496 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14497
14498 /* Optimize the special-case of a zero lower bound.
14499
14500 We convert the low_bound to sizetype to avoid some problems
14501 with constant folding. (E.g. suppose the lower bound is 1,
14502 and its mode is QI. Without the conversion,l (ARRAY
14503 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14504 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14505 if (! integer_zerop (low_bound))
14506 index = size_diffop_loc (loc, index,
14507 fold_convert_loc (loc, sizetype, low_bound));
14508
14509 string = exp1;
14510 }
14511
14512 if (string
14513 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14514 && TREE_CODE (string) == STRING_CST
14515 && TREE_CODE (index) == INTEGER_CST
14516 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14517 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14518 == MODE_INT)
14519 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14520 return build_int_cst_type (TREE_TYPE (exp),
14521 (TREE_STRING_POINTER (string)
14522 [TREE_INT_CST_LOW (index)]));
14523 }
14524 return NULL;
14525 }
14526
14527 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14528 an integer constant, real, or fixed-point constant.
14529
14530 TYPE is the type of the result. */
14531
14532 static tree
14533 fold_negate_const (tree arg0, tree type)
14534 {
14535 tree t = NULL_TREE;
14536
14537 switch (TREE_CODE (arg0))
14538 {
14539 case INTEGER_CST:
14540 {
14541 bool overflow;
14542 wide_int val = wi::neg (arg0, &overflow);
14543 t = force_fit_type (type, val, 1,
14544 (overflow | TREE_OVERFLOW (arg0))
14545 && !TYPE_UNSIGNED (type));
14546 break;
14547 }
14548
14549 case REAL_CST:
14550 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14551 break;
14552
14553 case FIXED_CST:
14554 {
14555 FIXED_VALUE_TYPE f;
14556 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14557 &(TREE_FIXED_CST (arg0)), NULL,
14558 TYPE_SATURATING (type));
14559 t = build_fixed (type, f);
14560 /* Propagate overflow flags. */
14561 if (overflow_p | TREE_OVERFLOW (arg0))
14562 TREE_OVERFLOW (t) = 1;
14563 break;
14564 }
14565
14566 default:
14567 gcc_unreachable ();
14568 }
14569
14570 return t;
14571 }
14572
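/* A minimal sketch, not part of GCC itself (the helper name is
   illustrative): negating the most negative INTEGER_CST wraps, so
   force_fit_type marks the result with TREE_OVERFLOW.  */

static tree
example_negate_int_min (void)
{
  tree int_min = TYPE_MIN_VALUE (integer_type_node);
  /* The returned INTEGER_CST equals INT_MIN again, but with
     TREE_OVERFLOW set because -INT_MIN does not fit.  */
  return fold_negate_const (int_min, integer_type_node);
}
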
14573 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14574 an integer constant or real constant.
14575
14576 TYPE is the type of the result. */
14577
14578 tree
14579 fold_abs_const (tree arg0, tree type)
14580 {
14581 tree t = NULL_TREE;
14582
14583 switch (TREE_CODE (arg0))
14584 {
14585 case INTEGER_CST:
14586 {
14587 /* If the value is unsigned or non-negative, then the absolute value
14588 is the same as the ordinary value. */
14589 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
14590 t = arg0;
14591
14592 /* If the value is negative, then the absolute value is
14593 its negation. */
14594 else
14595 {
14596 bool overflow;
14597 wide_int val = wi::neg (arg0, &overflow);
14598 t = force_fit_type (type, val, -1,
14599 overflow | TREE_OVERFLOW (arg0));
14600 }
14601 }
14602 break;
14603
14604 case REAL_CST:
14605 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14606 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14607 else
14608 t = arg0;
14609 break;
14610
14611 default:
14612 gcc_unreachable ();
14613 }
14614
14615 return t;
14616 }
14617
14618 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14619 constant. TYPE is the type of the result. */
14620
14621 static tree
14622 fold_not_const (const_tree arg0, tree type)
14623 {
14624 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14625
14626 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
14627 }
14628
14629 /* Given CODE, a relational operator, the target type, TYPE and two
14630 constant operands OP0 and OP1, return the result of the
14631 relational operation. If the result is not a compile time
14632 constant, then return NULL_TREE. */
14633
14634 static tree
14635 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14636 {
14637 int result, invert;
14638
14639 /* From here on, the only cases we handle are when the result is
14640 known to be a constant. */
14641
14642 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14643 {
14644 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14645 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14646
14647 /* Handle the cases where either operand is a NaN. */
14648 if (real_isnan (c0) || real_isnan (c1))
14649 {
14650 switch (code)
14651 {
14652 case EQ_EXPR:
14653 case ORDERED_EXPR:
14654 result = 0;
14655 break;
14656
14657 case NE_EXPR:
14658 case UNORDERED_EXPR:
14659 case UNLT_EXPR:
14660 case UNLE_EXPR:
14661 case UNGT_EXPR:
14662 case UNGE_EXPR:
14663 case UNEQ_EXPR:
14664 result = 1;
14665 break;
14666
14667 case LT_EXPR:
14668 case LE_EXPR:
14669 case GT_EXPR:
14670 case GE_EXPR:
14671 case LTGT_EXPR:
14672 if (flag_trapping_math)
14673 return NULL_TREE;
14674 result = 0;
14675 break;
14676
14677 default:
14678 gcc_unreachable ();
14679 }
14680
14681 return constant_boolean_node (result, type);
14682 }
14683
14684 return constant_boolean_node (real_compare (code, c0, c1), type);
14685 }
14686
14687 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14688 {
14689 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14690 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14691 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14692 }
14693
14694 /* Handle equality/inequality of complex constants. */
14695 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14696 {
14697 tree rcond = fold_relational_const (code, type,
14698 TREE_REALPART (op0),
14699 TREE_REALPART (op1));
14700 tree icond = fold_relational_const (code, type,
14701 TREE_IMAGPART (op0),
14702 TREE_IMAGPART (op1));
14703 if (code == EQ_EXPR)
14704 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14705 else if (code == NE_EXPR)
14706 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14707 else
14708 return NULL_TREE;
14709 }
14710
14711 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14712 {
14713 unsigned count = VECTOR_CST_NELTS (op0);
14714 tree *elts = XALLOCAVEC (tree, count);
14715 gcc_assert (VECTOR_CST_NELTS (op1) == count
14716 && TYPE_VECTOR_SUBPARTS (type) == count);
14717
14718 for (unsigned i = 0; i < count; i++)
14719 {
14720 tree elem_type = TREE_TYPE (type);
14721 tree elem0 = VECTOR_CST_ELT (op0, i);
14722 tree elem1 = VECTOR_CST_ELT (op1, i);
14723
14724 tree tem = fold_relational_const (code, elem_type,
14725 elem0, elem1);
14726
14727 if (tem == NULL_TREE)
14728 return NULL_TREE;
14729
14730 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14731 }
14732
14733 return build_vector (type, elts);
14734 }
14735
14736 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14737
14738 To compute GT, swap the arguments and do LT.
14739 To compute GE, do LT and invert the result.
14740 To compute LE, swap the arguments, do LT and invert the result.
14741 To compute NE, do EQ and invert the result.
14742
14743 Therefore, the code below must handle only EQ and LT. */
14744
14745 if (code == LE_EXPR || code == GT_EXPR)
14746 {
14747 std::swap (op0, op1);
14748 code = swap_tree_comparison (code);
14749 }
14750
14751 /* Note that it is safe to invert for real values here because we
14752 have already handled the one case where it matters. */
14753
14754 invert = 0;
14755 if (code == NE_EXPR || code == GE_EXPR)
14756 {
14757 invert = 1;
14758 code = invert_tree_comparison (code, false);
14759 }
14760
14761 /* Compute a result for LT or EQ if args permit;
14762 otherwise return NULL_TREE. */
14763 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14764 {
14765 if (code == EQ_EXPR)
14766 result = tree_int_cst_equal (op0, op1);
14767 else
14768 result = tree_int_cst_lt (op0, op1);
14769 }
14770 else
14771 return NULL_TREE;
14772
14773 if (invert)
14774 result ^= 1;
14775 return constant_boolean_node (result, type);
14776 }
14777
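/* A minimal sketch, not part of GCC itself (the helper name is
   illustrative): comparing two REAL_CSTs folds to a boolean constant;
   with a NaN operand, ordered comparisons such as LT_EXPR are left
   unfolded under -ftrapping-math.  */

static tree
example_fold_real_less_than (void)
{
  tree one = build_real (double_type_node, dconst1);
  tree two = build_real (double_type_node, dconst2);
  /* Returns boolean_true_node, since 1.0 < 2.0.  */
  return fold_relational_const (LT_EXPR, boolean_type_node, one, two);
}
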
14778 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14779 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14780 itself. */
14781
14782 tree
14783 fold_build_cleanup_point_expr (tree type, tree expr)
14784 {
14785 /* If the expression does not have side effects then we don't have to wrap
14786 it with a cleanup point expression. */
14787 if (!TREE_SIDE_EFFECTS (expr))
14788 return expr;
14789
14790 /* If the expression is a RETURN_EXPR, check whether the expression inside
14791 the return, or the right-hand side of a MODIFY_EXPR inside the return,
14792 has side effects. If either of them doesn't, we don't need to wrap the
14793 expression in a cleanup point expression. Note we don't check the
14794 left-hand side of the MODIFY_EXPR because it should always be the return decl. */
14795 if (TREE_CODE (expr) == RETURN_EXPR)
14796 {
14797 tree op = TREE_OPERAND (expr, 0);
14798 if (!op || !TREE_SIDE_EFFECTS (op))
14799 return expr;
14800 op = TREE_OPERAND (op, 1);
14801 if (!TREE_SIDE_EFFECTS (op))
14802 return expr;
14803 }
14804
14805 return build1 (CLEANUP_POINT_EXPR, type, expr);
14806 }
14807
14808 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14809 of an indirection through OP0, or NULL_TREE if no simplification is
14810 possible. */
14811
14812 tree
14813 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14814 {
14815 tree sub = op0;
14816 tree subtype;
14817
14818 STRIP_NOPS (sub);
14819 subtype = TREE_TYPE (sub);
14820 if (!POINTER_TYPE_P (subtype))
14821 return NULL_TREE;
14822
14823 if (TREE_CODE (sub) == ADDR_EXPR)
14824 {
14825 tree op = TREE_OPERAND (sub, 0);
14826 tree optype = TREE_TYPE (op);
14827 /* *&CONST_DECL -> to the value of the const decl. */
14828 if (TREE_CODE (op) == CONST_DECL)
14829 return DECL_INITIAL (op);
14830 /* *&p => p; make sure to handle *&"str"[cst] here. */
14831 if (type == optype)
14832 {
14833 tree fop = fold_read_from_constant_string (op);
14834 if (fop)
14835 return fop;
14836 else
14837 return op;
14838 }
14839 /* *(foo *)&fooarray => fooarray[0] */
14840 else if (TREE_CODE (optype) == ARRAY_TYPE
14841 && type == TREE_TYPE (optype)
14842 && (!in_gimple_form
14843 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14844 {
14845 tree type_domain = TYPE_DOMAIN (optype);
14846 tree min_val = size_zero_node;
14847 if (type_domain && TYPE_MIN_VALUE (type_domain))
14848 min_val = TYPE_MIN_VALUE (type_domain);
14849 if (in_gimple_form
14850 && TREE_CODE (min_val) != INTEGER_CST)
14851 return NULL_TREE;
14852 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14853 NULL_TREE, NULL_TREE);
14854 }
14855 /* *(foo *)&complexfoo => __real__ complexfoo */
14856 else if (TREE_CODE (optype) == COMPLEX_TYPE
14857 && type == TREE_TYPE (optype))
14858 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14859 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14860 else if (TREE_CODE (optype) == VECTOR_TYPE
14861 && type == TREE_TYPE (optype))
14862 {
14863 tree part_width = TYPE_SIZE (type);
14864 tree index = bitsize_int (0);
14865 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14866 }
14867 }
14868
14869 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14870 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14871 {
14872 tree op00 = TREE_OPERAND (sub, 0);
14873 tree op01 = TREE_OPERAND (sub, 1);
14874
14875 STRIP_NOPS (op00);
14876 if (TREE_CODE (op00) == ADDR_EXPR)
14877 {
14878 tree op00type;
14879 op00 = TREE_OPERAND (op00, 0);
14880 op00type = TREE_TYPE (op00);
14881
14882 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14883 if (TREE_CODE (op00type) == VECTOR_TYPE
14884 && type == TREE_TYPE (op00type))
14885 {
14886 HOST_WIDE_INT offset = tree_to_shwi (op01);
14887 tree part_width = TYPE_SIZE (type);
14888 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
14889 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14890 tree index = bitsize_int (indexi);
14891
14892 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14893 return fold_build3_loc (loc,
14894 BIT_FIELD_REF, type, op00,
14895 part_width, index);
14896
14897 }
14898 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14899 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14900 && type == TREE_TYPE (op00type))
14901 {
14902 tree size = TYPE_SIZE_UNIT (type);
14903 if (tree_int_cst_equal (size, op01))
14904 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14905 }
14906 /* ((foo *)&fooarray)[1] => fooarray[1] */
14907 else if (TREE_CODE (op00type) == ARRAY_TYPE
14908 && type == TREE_TYPE (op00type))
14909 {
14910 tree type_domain = TYPE_DOMAIN (op00type);
14911 tree min_val = size_zero_node;
14912 if (type_domain && TYPE_MIN_VALUE (type_domain))
14913 min_val = TYPE_MIN_VALUE (type_domain);
14914 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14915 TYPE_SIZE_UNIT (type));
14916 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14917 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14918 NULL_TREE, NULL_TREE);
14919 }
14920 }
14921 }
14922
14923 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14924 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14925 && type == TREE_TYPE (TREE_TYPE (subtype))
14926 && (!in_gimple_form
14927 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14928 {
14929 tree type_domain;
14930 tree min_val = size_zero_node;
14931 sub = build_fold_indirect_ref_loc (loc, sub);
14932 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14933 if (type_domain && TYPE_MIN_VALUE (type_domain))
14934 min_val = TYPE_MIN_VALUE (type_domain);
14935 if (in_gimple_form
14936 && TREE_CODE (min_val) != INTEGER_CST)
14937 return NULL_TREE;
14938 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14939 NULL_TREE);
14940 }
14941
14942 return NULL_TREE;
14943 }
14944
14945 /* Builds an expression for an indirection through T, simplifying some
14946 cases. */
14947
14948 tree
14949 build_fold_indirect_ref_loc (location_t loc, tree t)
14950 {
14951 tree type = TREE_TYPE (TREE_TYPE (t));
14952 tree sub = fold_indirect_ref_1 (loc, type, t);
14953
14954 if (sub)
14955 return sub;
14956
14957 return build1_loc (loc, INDIRECT_REF, type, t);
14958 }
14959
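/* A minimal sketch, not part of GCC itself (the helper name is
   illustrative): given ADDR, an ADDR_EXPR of an array whose elements
   have type ELEM_TYPE, the indirection folds to an ARRAY_REF of
   element 0 instead of an INDIRECT_REF, i.e. *(foo *)&fooarray
   becomes fooarray[0].  */

static tree
example_fold_array_deref (tree addr, tree elem_type)
{
  /* Returns the ARRAY_REF when the fold applies, else NULL_TREE.  */
  return fold_indirect_ref_1 (UNKNOWN_LOCATION, elem_type, addr);
}
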
14960 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14961
14962 tree
14963 fold_indirect_ref_loc (location_t loc, tree t)
14964 {
14965 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14966
14967 if (sub)
14968 return sub;
14969 else
14970 return t;
14971 }
14972
14973 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14974 whose result is ignored. The type of the returned tree need not be
14975 the same as that of the original expression. */
14976
14977 tree
14978 fold_ignored_result (tree t)
14979 {
14980 if (!TREE_SIDE_EFFECTS (t))
14981 return integer_zero_node;
14982
14983 for (;;)
14984 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14985 {
14986 case tcc_unary:
14987 t = TREE_OPERAND (t, 0);
14988 break;
14989
14990 case tcc_binary:
14991 case tcc_comparison:
14992 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14993 t = TREE_OPERAND (t, 0);
14994 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14995 t = TREE_OPERAND (t, 1);
14996 else
14997 return t;
14998 break;
14999
15000 case tcc_expression:
15001 switch (TREE_CODE (t))
15002 {
15003 case COMPOUND_EXPR:
15004 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15005 return t;
15006 t = TREE_OPERAND (t, 0);
15007 break;
15008
15009 case COND_EXPR:
15010 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15011 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15012 return t;
15013 t = TREE_OPERAND (t, 0);
15014 break;
15015
15016 default:
15017 return t;
15018 }
15019 break;
15020
15021 default:
15022 return t;
15023 }
15024 }
15025
15026 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15027
15028 tree
15029 round_up_loc (location_t loc, tree value, unsigned int divisor)
15030 {
15031 tree div = NULL_TREE;
15032
15033 if (divisor == 1)
15034 return value;
15035
15036 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15037 have to do anything. Only do this when VALUE is not a constant,
15038 because for a constant the check is more expensive than simply
15039 performing the rounding. */
15040 if (TREE_CODE (value) != INTEGER_CST)
15041 {
15042 div = build_int_cst (TREE_TYPE (value), divisor);
15043
15044 if (multiple_of_p (TREE_TYPE (value), value, div))
15045 return value;
15046 }
15047
15048 /* If divisor is a power of two, simplify this to bit manipulation. */
15049 if (divisor == (divisor & -divisor))
15050 {
15051 if (TREE_CODE (value) == INTEGER_CST)
15052 {
15053 wide_int val = value;
15054 bool overflow_p;
15055
15056 if ((val & (divisor - 1)) == 0)
15057 return value;
15058
15059 overflow_p = TREE_OVERFLOW (value);
15060 val += divisor - 1;
15061 val &= - (int) divisor;
15062 if (val == 0)
15063 overflow_p = true;
15064
15065 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15066 }
15067 else
15068 {
15069 tree t;
15070
15071 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15072 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15073 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
15074 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15075 }
15076 }
15077 else
15078 {
15079 if (!div)
15080 div = build_int_cst (TREE_TYPE (value), divisor);
15081 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15082 value = size_binop_loc (loc, MULT_EXPR, value, div);
15083 }
15084
15085 return value;
15086 }
15087
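/* A minimal sketch, not part of GCC itself (the helper name is
   illustrative): for a power-of-two divisor the rounding reduces to
   (VALUE + DIVISOR - 1) & -DIVISOR, so 37 rounds up to 40.  */

static tree
example_round_up_to_eight (void)
{
  tree v = build_int_cst (sizetype, 37);
  /* Returns the INTEGER_CST 40 == (37 + 7) & -8.  */
  return round_up_loc (UNKNOWN_LOCATION, v, 8);
}
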
15088 /* Likewise, but round down. */
15089
15090 tree
15091 round_down_loc (location_t loc, tree value, int divisor)
15092 {
15093 tree div = NULL_TREE;
15094
15095 gcc_assert (divisor > 0);
15096 if (divisor == 1)
15097 return value;
15098
15099 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15100 have to do anything. Only do this when VALUE is not a constant,
15101 because for a constant the check is more expensive than simply
15102 performing the rounding. */
15103 if (TREE_CODE (value) != INTEGER_CST)
15104 {
15105 div = build_int_cst (TREE_TYPE (value), divisor);
15106
15107 if (multiple_of_p (TREE_TYPE (value), value, div))
15108 return value;
15109 }
15110
15111 /* If divisor is a power of two, simplify this to bit manipulation. */
15112 if (divisor == (divisor & -divisor))
15113 {
15114 tree t;
15115
15116 t = build_int_cst (TREE_TYPE (value), -divisor);
15117 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15118 }
15119 else
15120 {
15121 if (!div)
15122 div = build_int_cst (TREE_TYPE (value), divisor);
15123 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15124 value = size_binop_loc (loc, MULT_EXPR, value, div);
15125 }
15126
15127 return value;
15128 }
15129
15130 /* Return a pointer to the base of the object addressed by EXP and
15131 extract the information about the offset of the access, storing it
15132 in PBITPOS and POFFSET. */
15133
15134 static tree
15135 split_address_to_core_and_offset (tree exp,
15136 HOST_WIDE_INT *pbitpos, tree *poffset)
15137 {
15138 tree core;
15139 machine_mode mode;
15140 int unsignedp, volatilep;
15141 HOST_WIDE_INT bitsize;
15142 location_t loc = EXPR_LOCATION (exp);
15143
15144 if (TREE_CODE (exp) == ADDR_EXPR)
15145 {
15146 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15147 poffset, &mode, &unsignedp, &volatilep,
15148 false);
15149 core = build_fold_addr_expr_loc (loc, core);
15150 }
15151 else
15152 {
15153 core = exp;
15154 *pbitpos = 0;
15155 *poffset = NULL_TREE;
15156 }
15157
15158 return core;
15159 }
15160
15161 /* Returns true if addresses of E1 and E2 differ by a constant, false
15162 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15163
15164 bool
15165 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15166 {
15167 tree core1, core2;
15168 HOST_WIDE_INT bitpos1, bitpos2;
15169 tree toffset1, toffset2, tdiff, type;
15170
15171 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15172 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15173
15174 if (bitpos1 % BITS_PER_UNIT != 0
15175 || bitpos2 % BITS_PER_UNIT != 0
15176 || !operand_equal_p (core1, core2, 0))
15177 return false;
15178
15179 if (toffset1 && toffset2)
15180 {
15181 type = TREE_TYPE (toffset1);
15182 if (type != TREE_TYPE (toffset2))
15183 toffset2 = fold_convert (type, toffset2);
15184
15185 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15186 if (!cst_and_fits_in_hwi (tdiff))
15187 return false;
15188
15189 *diff = int_cst_value (tdiff);
15190 }
15191 else if (toffset1 || toffset2)
15192 {
15193 /* If only one of the offsets is non-constant, the difference cannot
15194 be a constant. */
15195 return false;
15196 }
15197 else
15198 *diff = 0;
15199
15200 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15201 return true;
15202 }
15203
15204 /* Simplify the floating point expression EXP when the sign of the
15205 result is not significant. Return NULL_TREE if no simplification
15206 is possible. */
15207
15208 tree
15209 fold_strip_sign_ops (tree exp)
15210 {
15211 tree arg0, arg1;
15212 location_t loc = EXPR_LOCATION (exp);
15213
15214 switch (TREE_CODE (exp))
15215 {
15216 case ABS_EXPR:
15217 case NEGATE_EXPR:
15218 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15219 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15220
15221 case MULT_EXPR:
15222 case RDIV_EXPR:
15223 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
15224 return NULL_TREE;
15225 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15226 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15227 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15228 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
15229 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15230 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15231 break;
15232
15233 case COMPOUND_EXPR:
15234 arg0 = TREE_OPERAND (exp, 0);
15235 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15236 if (arg1)
15237 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15238 break;
15239
15240 case COND_EXPR:
15241 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15242 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15243 if (arg0 || arg1)
15244 return fold_build3_loc (loc,
15245 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15246 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15247 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15248 break;
15249
15250 case CALL_EXPR:
15251 {
15252 const enum built_in_function fcode = builtin_mathfn_code (exp);
15253 switch (fcode)
15254 {
15255 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15256 /* Strip the copysign function call; return its 1st argument. */
15257 arg0 = CALL_EXPR_ARG (exp, 0);
15258 arg1 = CALL_EXPR_ARG (exp, 1);
15259 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
15260
15261 default:
15262 /* Strip sign ops from the argument of "odd" math functions. */
15263 if (negate_mathfn_p (fcode))
15264 {
15265 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15266 if (arg0)
15267 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
15268 }
15269 break;
15270 }
15271 }
15272 break;
15273
15274 default:
15275 break;
15276 }
15277 return NULL_TREE;
15278 }
15279
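/* A minimal sketch, not part of GCC itself (the helper name is
   illustrative): in a sign-insensitive context such as the argument
   of fabs, -X * Y can be rewritten as X * Y.  */

static tree
example_strip_sign_ops (tree exp)
{
  tree stripped = fold_strip_sign_ops (exp);
  /* NULL_TREE means no sign operation could be stripped.  */
  return stripped ? stripped : exp;
}
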
15280 /* Return OFF converted to a pointer offset type suitable as offset for
15281 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15282 tree
15283 convert_to_ptrofftype_loc (location_t loc, tree off)
15284 {
15285 return fold_convert_loc (loc, sizetype, off);
15286 }
15287
15288 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15289 tree
15290 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
15291 {
15292 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15293 ptr, convert_to_ptrofftype_loc (loc, off));
15294 }
15295
15296 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15297 tree
15298 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
15299 {
15300 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15301 ptr, size_int (off));
15302 }