/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
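
/* A minimal illustration of the first entry point, sketched with
   hypothetical trees rather than code from this file:

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
                              build_int_cst (integer_type_node, 1),
                              build_int_cst (integer_type_node, 2)));

   returns the INTEGER_CST for 3 instead of a PLUS_EXPR node.  */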

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs-query.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-ssa-operands.h"
#include "tree-into-ssa.h"

#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
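
/* The encoding assigns one bit per primitive outcome: bit 0 is "less
   than", bit 1 is "equal", bit 2 is "greater than" and bit 3 is
   "unordered".  A compound operator is the inclusive OR of its
   outcomes, for example

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)                  == 3
     COMPCODE_NE == (COMPCODE_UNORD | COMPCODE_GT | COMPCODE_LT) == 13

   which is why AND and OR of comparisons reduce to bitwise AND and
   OR of these codes.  */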

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
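
/* For instance, given INTEGER_CST arguments 12 and 4 this returns the
   INTEGER_CST 3; given 13 and 4 it returns NULL_TREE because the
   remainder is nonzero.  */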
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
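
/* A typical use of the deferral machinery, sketched for illustration
   (the surrounding caller and RES_IS_USED are hypothetical):

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (res_is_used, stmt, 0);

   so that a -Wstrict-overflow warning triggered inside fold is only
   emitted once the caller has decided the folded result is actually
   used.  */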

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
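
/* For a 32-bit signed int the only value rejected above is INT_MIN,
   whose representation is exactly the sign bit: negating it wraps
   back to INT_MIN, while every other value has a representable
   negation.  */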

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
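
/* For example, splitting the tree for "x - 5" with CODE == PLUS_EXPR
   leaves *LITP and *CONP null, stores 5 in *MINUS_LITP (the literal
   was subtracted) and returns x as the variable part.  */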

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && real_equal (&d2, &dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
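
/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8))
   folds immediately to the sizetype INTEGER_CST 12, taking the
   two-integer-constant fast path above.  */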

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
1836
1837 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1838 to an integer type. */
1839
1840 static tree
1841 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1842 {
1843 bool overflow = false;
1844 tree t;
1845
1846 /* The following code implements the floating point to integer
1847 conversion rules required by the Java Language Specification,
1848 that IEEE NaNs are mapped to zero and values that overflow
1849 the target precision saturate, i.e. values greater than
1850 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1851 are mapped to INT_MIN. These semantics are allowed by the
1852 C and C++ standards that simply state that the behavior of
1853 FP-to-integer conversion is unspecified upon overflow. */
1854
1855 wide_int val;
1856 REAL_VALUE_TYPE r;
1857 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1858
1859 switch (code)
1860 {
1861 case FIX_TRUNC_EXPR:
1862 real_trunc (&r, VOIDmode, &x);
1863 break;
1864
1865 default:
1866 gcc_unreachable ();
1867 }
1868
1869 /* If R is NaN, return zero and show we have an overflow. */
1870 if (REAL_VALUE_ISNAN (r))
1871 {
1872 overflow = true;
1873 val = wi::zero (TYPE_PRECISION (type));
1874 }
1875
1876 /* See if R is less than the lower bound or greater than the
1877 upper bound. */
1878
1879 if (! overflow)
1880 {
1881 tree lt = TYPE_MIN_VALUE (type);
1882 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1883 if (real_less (&r, &l))
1884 {
1885 overflow = true;
1886 val = lt;
1887 }
1888 }
1889
1890 if (! overflow)
1891 {
1892 tree ut = TYPE_MAX_VALUE (type);
1893 if (ut)
1894 {
1895 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1896 if (real_less (&u, &r))
1897 {
1898 overflow = true;
1899 val = ut;
1900 }
1901 }
1902 }
1903
1904 if (! overflow)
1905 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1906
1907 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1908 return t;
1909 }
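
/* Illustrative sketch, not part of the original source.  For a 32-bit
   signed target type the saturating semantics above give:

     (int) 3.9    ->  3          (FIX_TRUNC_EXPR truncates toward zero)
     (int) 1e30   ->  INT_MAX    (upper bound, TREE_OVERFLOW set)
     (int) -1e30  ->  INT_MIN    (lower bound, TREE_OVERFLOW set)
     (int) NaN    ->  0          (TREE_OVERFLOW set)  */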
1910
1911 /* A subroutine of fold_convert_const handling conversions of a
1912 FIXED_CST to an integer type. */
1913
1914 static tree
1915 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1916 {
1917 tree t;
1918 double_int temp, temp_trunc;
1919 unsigned int mode;
1920
1921 /* Right shift FIXED_CST to temp by fbit. */
1922 temp = TREE_FIXED_CST (arg1).data;
1923 mode = TREE_FIXED_CST (arg1).mode;
1924 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1925 {
1926 temp = temp.rshift (GET_MODE_FBIT (mode),
1927 HOST_BITS_PER_DOUBLE_INT,
1928 SIGNED_FIXED_POINT_MODE_P (mode));
1929
1930 /* Left shift temp to temp_trunc by fbit. */
1931 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1932 HOST_BITS_PER_DOUBLE_INT,
1933 SIGNED_FIXED_POINT_MODE_P (mode));
1934 }
1935 else
1936 {
1937 temp = double_int_zero;
1938 temp_trunc = double_int_zero;
1939 }
1940
1941 /* If FIXED_CST is negative, we need to round the value toward 0:
1942 add 1 to TEMP whenever the discarded fractional bits are nonzero. */
1943 if (SIGNED_FIXED_POINT_MODE_P (mode)
1944 && temp_trunc.is_negative ()
1945 && TREE_FIXED_CST (arg1).data != temp_trunc)
1946 temp += double_int_one;
1947
1948 /* Given a fixed-point constant, make new constant with new type,
1949 appropriately sign-extended or truncated. */
1950 t = force_fit_type (type, temp, -1,
1951 (temp.is_negative ()
1952 && (TYPE_UNSIGNED (type)
1953 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1954 | TREE_OVERFLOW (arg1));
1955
1956 return t;
1957 }
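
/* Illustrative sketch, not part of the original source.  With 8
   fractional bits, the fixed-point value -2.75 is the bit pattern
   -704 (-2.75 * 256); the arithmetic right shift gives floor(-2.75)
   = -3, and since shifting back loses the nonzero fraction, 1 is
   added, producing the truncated-toward-zero result -2.  A positive
   2.75 shifts directly to 2 with no correction.  */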
1958
1959 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1960 to another floating point type. */
1961
1962 static tree
1963 fold_convert_const_real_from_real (tree type, const_tree arg1)
1964 {
1965 REAL_VALUE_TYPE value;
1966 tree t;
1967
1968 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1969 t = build_real (type, value);
1970
1971 /* If converting an infinity or NAN to a representation that doesn't
1972 have one, set the overflow bit so that we can produce some kind of
1973 error message at the appropriate point if necessary. It's not the
1974 most user-friendly message, but it's better than nothing. */
1975 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1976 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1977 TREE_OVERFLOW (t) = 1;
1978 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1979 && !MODE_HAS_NANS (TYPE_MODE (type)))
1980 TREE_OVERFLOW (t) = 1;
1981 /* Regular overflow: the conversion produced an infinity in a mode
1982 that can't represent infinities. */
1983 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1984 && REAL_VALUE_ISINF (value)
1985 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1986 TREE_OVERFLOW (t) = 1;
1987 else
1988 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1989 return t;
1990 }
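
/* Illustrative sketch, not part of the original source.  Narrowing the
   double constant 1e308 to float overflows to +Inf; because the float
   mode has infinities, the branches above simply keep the TREE_OVERFLOW
   flag inherited from the operand.  Only a conversion that produces an
   infinity or NaN in a mode lacking such values sets the flag.  */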
1991
1992 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1993 to a floating point type. */
1994
1995 static tree
1996 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1997 {
1998 REAL_VALUE_TYPE value;
1999 tree t;
2000
2001 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2002 t = build_real (type, value);
2003
2004 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2005 return t;
2006 }
2007
2008 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2009 to another fixed-point type. */
2010
2011 static tree
2012 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2013 {
2014 FIXED_VALUE_TYPE value;
2015 tree t;
2016 bool overflow_p;
2017
2018 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2019 TYPE_SATURATING (type));
2020 t = build_fixed (type, value);
2021
2022 /* Propagate overflow flags. */
2023 if (overflow_p | TREE_OVERFLOW (arg1))
2024 TREE_OVERFLOW (t) = 1;
2025 return t;
2026 }
2027
2028 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2029 to a fixed-point type. */
2030
2031 static tree
2032 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2033 {
2034 FIXED_VALUE_TYPE value;
2035 tree t;
2036 bool overflow_p;
2037 double_int di;
2038
2039 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2040
2041 di.low = TREE_INT_CST_ELT (arg1, 0);
2042 if (TREE_INT_CST_NUNITS (arg1) == 1)
2043 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2044 else
2045 di.high = TREE_INT_CST_ELT (arg1, 1);
2046
2047 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2048 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2049 TYPE_SATURATING (type));
2050 t = build_fixed (type, value);
2051
2052 /* Propagate overflow flags. */
2053 if (overflow_p | TREE_OVERFLOW (arg1))
2054 TREE_OVERFLOW (t) = 1;
2055 return t;
2056 }
2057
2058 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2059 to a fixed-point type. */
2060
2061 static tree
2062 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2063 {
2064 FIXED_VALUE_TYPE value;
2065 tree t;
2066 bool overflow_p;
2067
2068 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2069 &TREE_REAL_CST (arg1),
2070 TYPE_SATURATING (type));
2071 t = build_fixed (type, value);
2072
2073 /* Propagate overflow flags. */
2074 if (overflow_p | TREE_OVERFLOW (arg1))
2075 TREE_OVERFLOW (t) = 1;
2076 return t;
2077 }
2078
2079 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2080 type TYPE. If no simplification can be done return NULL_TREE. */
2081
2082 static tree
2083 fold_convert_const (enum tree_code code, tree type, tree arg1)
2084 {
2085 if (TREE_TYPE (arg1) == type)
2086 return arg1;
2087
2088 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2089 || TREE_CODE (type) == OFFSET_TYPE)
2090 {
2091 if (TREE_CODE (arg1) == INTEGER_CST)
2092 return fold_convert_const_int_from_int (type, arg1);
2093 else if (TREE_CODE (arg1) == REAL_CST)
2094 return fold_convert_const_int_from_real (code, type, arg1);
2095 else if (TREE_CODE (arg1) == FIXED_CST)
2096 return fold_convert_const_int_from_fixed (type, arg1);
2097 }
2098 else if (TREE_CODE (type) == REAL_TYPE)
2099 {
2100 if (TREE_CODE (arg1) == INTEGER_CST)
2101 return build_real_from_int_cst (type, arg1);
2102 else if (TREE_CODE (arg1) == REAL_CST)
2103 return fold_convert_const_real_from_real (type, arg1);
2104 else if (TREE_CODE (arg1) == FIXED_CST)
2105 return fold_convert_const_real_from_fixed (type, arg1);
2106 }
2107 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2108 {
2109 if (TREE_CODE (arg1) == FIXED_CST)
2110 return fold_convert_const_fixed_from_fixed (type, arg1);
2111 else if (TREE_CODE (arg1) == INTEGER_CST)
2112 return fold_convert_const_fixed_from_int (type, arg1);
2113 else if (TREE_CODE (arg1) == REAL_CST)
2114 return fold_convert_const_fixed_from_real (type, arg1);
2115 }
2116 return NULL_TREE;
2117 }
2118
2119 /* Construct a vector of type TYPE in which every element is zero. */
2120
2121 static tree
2122 build_zero_vector (tree type)
2123 {
2124 tree t;
2125
2126 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2127 return build_vector_from_val (type, t);
2128 }
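
/* Illustrative sketch, not part of the original source: for a
   four-element int vector type this folds integer_zero_node to the
   element type and broadcasts it, yielding the VECTOR_CST
   { 0, 0, 0, 0 }.  */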
2129
2130 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2131
2132 bool
2133 fold_convertible_p (const_tree type, const_tree arg)
2134 {
2135 tree orig = TREE_TYPE (arg);
2136
2137 if (type == orig)
2138 return true;
2139
2140 if (TREE_CODE (arg) == ERROR_MARK
2141 || TREE_CODE (type) == ERROR_MARK
2142 || TREE_CODE (orig) == ERROR_MARK)
2143 return false;
2144
2145 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2146 return true;
2147
2148 switch (TREE_CODE (type))
2149 {
2150 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2151 case POINTER_TYPE: case REFERENCE_TYPE:
2152 case OFFSET_TYPE:
2153 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2154 || TREE_CODE (orig) == OFFSET_TYPE)
2155 return true;
2156 return (TREE_CODE (orig) == VECTOR_TYPE
2157 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2158
2159 case REAL_TYPE:
2160 case FIXED_POINT_TYPE:
2161 case COMPLEX_TYPE:
2162 case VECTOR_TYPE:
2163 case VOID_TYPE:
2164 return TREE_CODE (type) == TREE_CODE (orig);
2165
2166 default:
2167 return false;
2168 }
2169 }
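
/* Illustrative sketch, not part of the original source:

     fold_convertible_p (long_long_integer_type_node, <int expr>)
       returns true (integer widening is a NOP_EXPR);
     fold_convertible_p (double_type_node, <float expr>)
       returns true (same tree code, REAL_TYPE);
     fold_convertible_p (double_type_node, <int expr>)
       returns false -- that conversion needs a FLOAT_EXPR.  */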
2170
2171 /* Convert expression ARG to type TYPE. Used by the middle-end for
2172 simple conversions in preference to calling the front-end's convert. */
2173
2174 tree
2175 fold_convert_loc (location_t loc, tree type, tree arg)
2176 {
2177 tree orig = TREE_TYPE (arg);
2178 tree tem;
2179
2180 if (type == orig)
2181 return arg;
2182
2183 if (TREE_CODE (arg) == ERROR_MARK
2184 || TREE_CODE (type) == ERROR_MARK
2185 || TREE_CODE (orig) == ERROR_MARK)
2186 return error_mark_node;
2187
2188 switch (TREE_CODE (type))
2189 {
2190 case POINTER_TYPE:
2191 case REFERENCE_TYPE:
2192 /* Handle conversions between pointers to different address spaces. */
2193 if (POINTER_TYPE_P (orig)
2194 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2195 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2196 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2197 /* fall through */
2198
2199 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2200 case OFFSET_TYPE:
2201 if (TREE_CODE (arg) == INTEGER_CST)
2202 {
2203 tem = fold_convert_const (NOP_EXPR, type, arg);
2204 if (tem != NULL_TREE)
2205 return tem;
2206 }
2207 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2208 || TREE_CODE (orig) == OFFSET_TYPE)
2209 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2210 if (TREE_CODE (orig) == COMPLEX_TYPE)
2211 return fold_convert_loc (loc, type,
2212 fold_build1_loc (loc, REALPART_EXPR,
2213 TREE_TYPE (orig), arg));
2214 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2215 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2216 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2217
2218 case REAL_TYPE:
2219 if (TREE_CODE (arg) == INTEGER_CST)
2220 {
2221 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2222 if (tem != NULL_TREE)
2223 return tem;
2224 }
2225 else if (TREE_CODE (arg) == REAL_CST)
2226 {
2227 tem = fold_convert_const (NOP_EXPR, type, arg);
2228 if (tem != NULL_TREE)
2229 return tem;
2230 }
2231 else if (TREE_CODE (arg) == FIXED_CST)
2232 {
2233 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2234 if (tem != NULL_TREE)
2235 return tem;
2236 }
2237
2238 switch (TREE_CODE (orig))
2239 {
2240 case INTEGER_TYPE:
2241 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2242 case POINTER_TYPE: case REFERENCE_TYPE:
2243 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2244
2245 case REAL_TYPE:
2246 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2247
2248 case FIXED_POINT_TYPE:
2249 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2250
2251 case COMPLEX_TYPE:
2252 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2253 return fold_convert_loc (loc, type, tem);
2254
2255 default:
2256 gcc_unreachable ();
2257 }
2258
2259 case FIXED_POINT_TYPE:
2260 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2261 || TREE_CODE (arg) == REAL_CST)
2262 {
2263 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2264 if (tem != NULL_TREE)
2265 goto fold_convert_exit;
2266 }
2267
2268 switch (TREE_CODE (orig))
2269 {
2270 case FIXED_POINT_TYPE:
2271 case INTEGER_TYPE:
2272 case ENUMERAL_TYPE:
2273 case BOOLEAN_TYPE:
2274 case REAL_TYPE:
2275 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2276
2277 case COMPLEX_TYPE:
2278 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2279 return fold_convert_loc (loc, type, tem);
2280
2281 default:
2282 gcc_unreachable ();
2283 }
2284
2285 case COMPLEX_TYPE:
2286 switch (TREE_CODE (orig))
2287 {
2288 case INTEGER_TYPE:
2289 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2290 case POINTER_TYPE: case REFERENCE_TYPE:
2291 case REAL_TYPE:
2292 case FIXED_POINT_TYPE:
2293 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2294 fold_convert_loc (loc, TREE_TYPE (type), arg),
2295 fold_convert_loc (loc, TREE_TYPE (type),
2296 integer_zero_node));
2297 case COMPLEX_TYPE:
2298 {
2299 tree rpart, ipart;
2300
2301 if (TREE_CODE (arg) == COMPLEX_EXPR)
2302 {
2303 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2304 TREE_OPERAND (arg, 0));
2305 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2306 TREE_OPERAND (arg, 1));
2307 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2308 }
2309
2310 arg = save_expr (arg);
2311 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2312 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2313 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2314 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2315 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2316 }
2317
2318 default:
2319 gcc_unreachable ();
2320 }
2321
2322 case VECTOR_TYPE:
2323 if (integer_zerop (arg))
2324 return build_zero_vector (type);
2325 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2326 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2327 || TREE_CODE (orig) == VECTOR_TYPE);
2328 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2329
2330 case VOID_TYPE:
2331 tem = fold_ignored_result (arg);
2332 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2333
2334 default:
2335 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2336 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2337 gcc_unreachable ();
2338 }
2339 fold_convert_exit:
2340 protected_set_expr_location_unshare (tem, loc);
2341 return tem;
2342 }
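
/* Illustrative sketch, not part of the original source: converting a
   complex double value X to double folds to a conversion of
   REALPART_EXPR <X> only, while converting a float F to complex float
   builds COMPLEX_EXPR <F, 0.0f> -- the cases above mirror the C
   conversion rules between real and complex types.  */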
2343 \f
2344 /* Return false if expr can be assumed not to be an lvalue, true
2345 otherwise. */
2346
2347 static bool
2348 maybe_lvalue_p (const_tree x)
2349 {
2350 /* We only need to wrap lvalue tree codes. */
2351 switch (TREE_CODE (x))
2352 {
2353 case VAR_DECL:
2354 case PARM_DECL:
2355 case RESULT_DECL:
2356 case LABEL_DECL:
2357 case FUNCTION_DECL:
2358 case SSA_NAME:
2359
2360 case COMPONENT_REF:
2361 case MEM_REF:
2362 case INDIRECT_REF:
2363 case ARRAY_REF:
2364 case ARRAY_RANGE_REF:
2365 case BIT_FIELD_REF:
2366 case OBJ_TYPE_REF:
2367
2368 case REALPART_EXPR:
2369 case IMAGPART_EXPR:
2370 case PREINCREMENT_EXPR:
2371 case PREDECREMENT_EXPR:
2372 case SAVE_EXPR:
2373 case TRY_CATCH_EXPR:
2374 case WITH_CLEANUP_EXPR:
2375 case COMPOUND_EXPR:
2376 case MODIFY_EXPR:
2377 case TARGET_EXPR:
2378 case COND_EXPR:
2379 case BIND_EXPR:
2380 break;
2381
2382 default:
2383 /* Assume the worst for front-end tree codes. */
2384 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2385 break;
2386 return false;
2387 }
2388
2389 return true;
2390 }
2391
2392 /* Return an expr equal to X but certainly not valid as an lvalue. */
2393
2394 tree
2395 non_lvalue_loc (location_t loc, tree x)
2396 {
2397 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2398 us. */
2399 if (in_gimple_form)
2400 return x;
2401
2402 if (! maybe_lvalue_p (x))
2403 return x;
2404 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2405 }
2406
2407 /* When pedantic, return an expr equal to X but certainly not valid as a
2408 pedantic lvalue. Otherwise, return X. */
2409
2410 static tree
2411 pedantic_non_lvalue_loc (location_t loc, tree x)
2412 {
2413 return protected_set_expr_location_unshare (x, loc);
2414 }
2415 \f
2416 /* Given a tree comparison code, return the code that is the logical inverse.
2417 It is generally not safe to do this for floating-point comparisons, except
2418 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2419 ERROR_MARK in this case. */
2420
2421 enum tree_code
2422 invert_tree_comparison (enum tree_code code, bool honor_nans)
2423 {
2424 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2425 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2426 return ERROR_MARK;
2427
2428 switch (code)
2429 {
2430 case EQ_EXPR:
2431 return NE_EXPR;
2432 case NE_EXPR:
2433 return EQ_EXPR;
2434 case GT_EXPR:
2435 return honor_nans ? UNLE_EXPR : LE_EXPR;
2436 case GE_EXPR:
2437 return honor_nans ? UNLT_EXPR : LT_EXPR;
2438 case LT_EXPR:
2439 return honor_nans ? UNGE_EXPR : GE_EXPR;
2440 case LE_EXPR:
2441 return honor_nans ? UNGT_EXPR : GT_EXPR;
2442 case LTGT_EXPR:
2443 return UNEQ_EXPR;
2444 case UNEQ_EXPR:
2445 return LTGT_EXPR;
2446 case UNGT_EXPR:
2447 return LE_EXPR;
2448 case UNGE_EXPR:
2449 return LT_EXPR;
2450 case UNLT_EXPR:
2451 return GE_EXPR;
2452 case UNLE_EXPR:
2453 return GT_EXPR;
2454 case ORDERED_EXPR:
2455 return UNORDERED_EXPR;
2456 case UNORDERED_EXPR:
2457 return ORDERED_EXPR;
2458 default:
2459 gcc_unreachable ();
2460 }
2461 }
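
/* Illustrative sketch, not part of the original source.  With
   flag_trapping_math clear:

     invert_tree_comparison (LT_EXPR, false)  ->  GE_EXPR
     invert_tree_comparison (LT_EXPR, true)   ->  UNGE_EXPR

   since !(a < b) must stay true when either operand is NaN.  With
   flag_trapping_math set, LT_EXPR yields ERROR_MARK instead: a < b
   traps on a NaN operand but the unordered form does not, so the
   rewrite would change trapping behavior.  */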
2462
2463 /* Similar, but return the comparison that results if the operands are
2464 swapped. This is safe for floating-point. */
2465
2466 enum tree_code
2467 swap_tree_comparison (enum tree_code code)
2468 {
2469 switch (code)
2470 {
2471 case EQ_EXPR:
2472 case NE_EXPR:
2473 case ORDERED_EXPR:
2474 case UNORDERED_EXPR:
2475 case LTGT_EXPR:
2476 case UNEQ_EXPR:
2477 return code;
2478 case GT_EXPR:
2479 return LT_EXPR;
2480 case GE_EXPR:
2481 return LE_EXPR;
2482 case LT_EXPR:
2483 return GT_EXPR;
2484 case LE_EXPR:
2485 return GE_EXPR;
2486 case UNGT_EXPR:
2487 return UNLT_EXPR;
2488 case UNGE_EXPR:
2489 return UNLE_EXPR;
2490 case UNLT_EXPR:
2491 return UNGT_EXPR;
2492 case UNLE_EXPR:
2493 return UNGE_EXPR;
2494 default:
2495 gcc_unreachable ();
2496 }
2497 }
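
/* Illustrative sketch, not part of the original source: a < b and
   b > a are the same test, so swap_tree_comparison (LT_EXPR) is
   GT_EXPR, and symmetric codes (EQ_EXPR, UNORDERED_EXPR, LTGT_EXPR,
   ...) map to themselves.  Unlike inversion, swapping never changes
   how NaNs are treated, which is why it is safe for floating point.  */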
2498
2499
2500 /* Convert a comparison tree code from an enum tree_code representation
2501 into a compcode bit-based encoding. This function is the inverse of
2502 compcode_to_comparison. */
2503
2504 static enum comparison_code
2505 comparison_to_compcode (enum tree_code code)
2506 {
2507 switch (code)
2508 {
2509 case LT_EXPR:
2510 return COMPCODE_LT;
2511 case EQ_EXPR:
2512 return COMPCODE_EQ;
2513 case LE_EXPR:
2514 return COMPCODE_LE;
2515 case GT_EXPR:
2516 return COMPCODE_GT;
2517 case NE_EXPR:
2518 return COMPCODE_NE;
2519 case GE_EXPR:
2520 return COMPCODE_GE;
2521 case ORDERED_EXPR:
2522 return COMPCODE_ORD;
2523 case UNORDERED_EXPR:
2524 return COMPCODE_UNORD;
2525 case UNLT_EXPR:
2526 return COMPCODE_UNLT;
2527 case UNEQ_EXPR:
2528 return COMPCODE_UNEQ;
2529 case UNLE_EXPR:
2530 return COMPCODE_UNLE;
2531 case UNGT_EXPR:
2532 return COMPCODE_UNGT;
2533 case LTGT_EXPR:
2534 return COMPCODE_LTGT;
2535 case UNGE_EXPR:
2536 return COMPCODE_UNGE;
2537 default:
2538 gcc_unreachable ();
2539 }
2540 }
2541
2542 /* Convert a compcode bit-based encoding of a comparison operator back
2543 to GCC's enum tree_code representation. This function is the
2544 inverse of comparison_to_compcode. */
2545
2546 static enum tree_code
2547 compcode_to_comparison (enum comparison_code code)
2548 {
2549 switch (code)
2550 {
2551 case COMPCODE_LT:
2552 return LT_EXPR;
2553 case COMPCODE_EQ:
2554 return EQ_EXPR;
2555 case COMPCODE_LE:
2556 return LE_EXPR;
2557 case COMPCODE_GT:
2558 return GT_EXPR;
2559 case COMPCODE_NE:
2560 return NE_EXPR;
2561 case COMPCODE_GE:
2562 return GE_EXPR;
2563 case COMPCODE_ORD:
2564 return ORDERED_EXPR;
2565 case COMPCODE_UNORD:
2566 return UNORDERED_EXPR;
2567 case COMPCODE_UNLT:
2568 return UNLT_EXPR;
2569 case COMPCODE_UNEQ:
2570 return UNEQ_EXPR;
2571 case COMPCODE_UNLE:
2572 return UNLE_EXPR;
2573 case COMPCODE_UNGT:
2574 return UNGT_EXPR;
2575 case COMPCODE_LTGT:
2576 return LTGT_EXPR;
2577 case COMPCODE_UNGE:
2578 return UNGE_EXPR;
2579 default:
2580 gcc_unreachable ();
2581 }
2582 }
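
/* Illustrative sketch, not part of the original source.  The encoding
   gives LT, EQ, GT and UNORD independent bits, so combining two
   comparisons of the same operands reduces to bit arithmetic:

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE   (a < b || a == b)
     COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ   (a <= b && a >= b)

   This is exactly the property combine_comparisons below exploits.  */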
2583
2584 /* Return a tree for the comparison which is the combination of
2585 doing the AND or OR (depending on CODE) of the two operations LCODE
2586 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2587 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2588 if this makes the transformation invalid. */
2589
2590 tree
2591 combine_comparisons (location_t loc,
2592 enum tree_code code, enum tree_code lcode,
2593 enum tree_code rcode, tree truth_type,
2594 tree ll_arg, tree lr_arg)
2595 {
2596 bool honor_nans = HONOR_NANS (ll_arg);
2597 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2598 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2599 int compcode;
2600
2601 switch (code)
2602 {
2603 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2604 compcode = lcompcode & rcompcode;
2605 break;
2606
2607 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2608 compcode = lcompcode | rcompcode;
2609 break;
2610
2611 default:
2612 return NULL_TREE;
2613 }
2614
2615 if (!honor_nans)
2616 {
2617 /* Eliminate unordered comparisons, as well as LTGT and ORD
2618 which are not used unless the mode has NaNs. */
2619 compcode &= ~COMPCODE_UNORD;
2620 if (compcode == COMPCODE_LTGT)
2621 compcode = COMPCODE_NE;
2622 else if (compcode == COMPCODE_ORD)
2623 compcode = COMPCODE_TRUE;
2624 }
2625 else if (flag_trapping_math)
2626 {
2627 /* Check that the original operation and the optimized ones will trap
2628 under the same condition. */
2629 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2630 && (lcompcode != COMPCODE_EQ)
2631 && (lcompcode != COMPCODE_ORD);
2632 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2633 && (rcompcode != COMPCODE_EQ)
2634 && (rcompcode != COMPCODE_ORD);
2635 bool trap = (compcode & COMPCODE_UNORD) == 0
2636 && (compcode != COMPCODE_EQ)
2637 && (compcode != COMPCODE_ORD);
2638
2639 /* In a short-circuited boolean expression the LHS might be
2640 such that the RHS, if evaluated, will never trap. For
2641 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2642 if neither x nor y is NaN. (This is a mixed blessing: for
2643 example, the expression above will never trap, hence
2644 optimizing it to x < y would be invalid). */
2645 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2646 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2647 rtrap = false;
2648
2649 /* If the comparison was short-circuited, and only the RHS
2650 trapped, we may now generate a spurious trap. */
2651 if (rtrap && !ltrap
2652 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2653 return NULL_TREE;
2654
2655 /* If we changed the conditions that cause a trap, we lose. */
2656 if ((ltrap || rtrap) != trap)
2657 return NULL_TREE;
2658 }
2659
2660 if (compcode == COMPCODE_TRUE)
2661 return constant_boolean_node (true, truth_type);
2662 else if (compcode == COMPCODE_FALSE)
2663 return constant_boolean_node (false, truth_type);
2664 else
2665 {
2666 enum tree_code tcode;
2667
2668 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2669 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2670 }
2671 }
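
/* Illustrative sketch, not part of the original source.  For integral
   operands (no NaNs, so honor_nans is false):

     combine_comparisons (loc, TRUTH_ANDIF_EXPR, LE_EXPR, GE_EXPR,
                          boolean_type_node, a, b)

   ANDs the compcodes (LE & GE == EQ) and returns a == b, while OR-ing
   LT_EXPR with EQ_EXPR yields a <= b.  For floating point with
   flag_trapping_math, the checks above may instead return NULL_TREE to
   avoid adding or removing a trap.  */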
2672 \f
2673 /* Return nonzero if two operands (typically of the same tree node)
2674 are necessarily equal. If either argument has side-effects this
2675 function returns zero. FLAGS modifies behavior as follows:
2676
2677 If OEP_ONLY_CONST is set, only return nonzero for constants.
2678 This function tests whether the operands are indistinguishable;
2679 it does not test whether they are equal using C's == operation.
2680 The distinction is important for IEEE floating point, because
2681 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2682 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2683
2684 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2685 even though it may hold multiple values during a function.
2686 This is because a GCC tree node guarantees that nothing else is
2687 executed between the evaluation of its "operands" (which may often
2688 be evaluated in arbitrary order). Hence if the operands themselves
2689 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2690 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2691 unset means assuming isochronic (or instantaneous) tree equivalence.
2692 Unless comparing arbitrary expression trees, such as from different
2693 statements, this flag can usually be left unset.
2694
2695 If OEP_PURE_SAME is set, then pure functions with identical arguments
2696 are considered the same. It is used when the caller has other ways
2697 to ensure that global memory is unchanged in between.
2698
2699 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2700 not values of expressions. OEP_CONSTANT_ADDRESS_OF in addition to
2701 OEP_ADDRESS_OF is used for ADDR_EXPR with the TREE_CONSTANT flag set,
2702 in which case we further ignore any side effects on SAVE_EXPRs. */
2703
2704 int
2705 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2706 {
2707 /* If either is ERROR_MARK, they aren't equal. */
2708 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2709 || TREE_TYPE (arg0) == error_mark_node
2710 || TREE_TYPE (arg1) == error_mark_node)
2711 return 0;
2712
2713 /* Similar, if either does not have a type (like a released SSA name),
2714 they aren't equal. */
2715 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2716 return 0;
2717
2718 /* Check equality of integer constants before bailing out due to
2719 precision differences. */
2720 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2721 {
2722 /* Address of INTEGER_CST is not defined; check that we did not forget
2723 to drop the OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2724 gcc_checking_assert (!(flags
2725 & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF)));
2726 return tree_int_cst_equal (arg0, arg1);
2727 }
2728
2729 if (!(flags & OEP_ADDRESS_OF))
2730 {
2731 /* If both types don't have the same signedness, then we can't consider
2732 them equal. We must check this before the STRIP_NOPS calls
2733 because they may change the signedness of the arguments. As pointers
2734 strictly don't have a signedness, require either two pointers or
2735 two non-pointers as well. */
2736 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2737 || POINTER_TYPE_P (TREE_TYPE (arg0))
2738 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2739 return 0;
2740
2741 /* We cannot consider pointers to different address space equal. */
2742 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2743 && POINTER_TYPE_P (TREE_TYPE (arg1))
2744 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2745 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2746 return 0;
2747
2748 /* If both types don't have the same precision, then it is not safe
2749 to strip NOPs. */
2750 if (element_precision (TREE_TYPE (arg0))
2751 != element_precision (TREE_TYPE (arg1)))
2752 return 0;
2753
2754 STRIP_NOPS (arg0);
2755 STRIP_NOPS (arg1);
2756 }
2757 #if 0
2758 /* FIXME: Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2759 sanity check once the issue is solved. */
2760 else
2761 /* Addresses of conversions and SSA_NAMEs (and many other things)
2762 are not defined. Check that we did not forget to drop the
2763 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2764 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2765 && TREE_CODE (arg0) != SSA_NAME);
2766 #endif
2767
2768 /* In case both args are comparisons but with different comparison
2769 code, try to swap the comparison operands of one arg to produce
2770 a match and compare that variant. */
2771 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2772 && COMPARISON_CLASS_P (arg0)
2773 && COMPARISON_CLASS_P (arg1))
2774 {
2775 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2776
2777 if (TREE_CODE (arg0) == swap_code)
2778 return operand_equal_p (TREE_OPERAND (arg0, 0),
2779 TREE_OPERAND (arg1, 1), flags)
2780 && operand_equal_p (TREE_OPERAND (arg0, 1),
2781 TREE_OPERAND (arg1, 0), flags);
2782 }
2783
2784 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2785 {
2786 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2787 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2788 ;
2789 else if (flags & OEP_ADDRESS_OF)
2790 {
2791 /* If we are interested in comparing addresses ignore
2792 MEM_REF wrappings of the base that can appear just for
2793 TBAA reasons. */
2794 if (TREE_CODE (arg0) == MEM_REF
2795 && DECL_P (arg1)
2796 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2797 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2798 && integer_zerop (TREE_OPERAND (arg0, 1)))
2799 return 1;
2800 else if (TREE_CODE (arg1) == MEM_REF
2801 && DECL_P (arg0)
2802 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2803 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2804 && integer_zerop (TREE_OPERAND (arg1, 1)))
2805 return 1;
2806 return 0;
2807 }
2808 else
2809 return 0;
2810 }
2811
2812 /* This is needed for conversions and for COMPONENT_REF.
2813 Might as well play it safe and always test this. */
2814 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2815 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2816 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2817 return 0;
2818
2819 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2820 We don't care about side effects in that case because the SAVE_EXPR
2821 takes care of that for us. In all other cases, two expressions are
2822 equal if they have no side effects. If we have two identical
2823 expressions with side effects that should be treated the same due
2824 to the only side effects being identical SAVE_EXPR's, that will
2825 be detected in the recursive calls below.
2826 If we are taking an invariant address of two identical objects
2827 they are necessarily equal as well. */
2828 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2829 && (TREE_CODE (arg0) == SAVE_EXPR
2830 || (flags & OEP_CONSTANT_ADDRESS_OF)
2831 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2832 return 1;
2833
2834 /* Next handle constant cases, those for which we can return 1 even
2835 if ONLY_CONST is set. */
2836 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2837 switch (TREE_CODE (arg0))
2838 {
2839 case INTEGER_CST:
2840 return tree_int_cst_equal (arg0, arg1);
2841
2842 case FIXED_CST:
2843 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2844 TREE_FIXED_CST (arg1));
2845
2846 case REAL_CST:
2847 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2848 return 1;
2849
2850
2851 if (!HONOR_SIGNED_ZEROS (arg0))
2852 {
2853 /* If we do not distinguish between negative and positive zero,
2854 consider them equal. */
2855 if (real_zerop (arg0) && real_zerop (arg1))
2856 return 1;
2857 }
2858 return 0;
2859
2860 case VECTOR_CST:
2861 {
2862 unsigned i;
2863
2864 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2865 return 0;
2866
2867 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2868 {
2869 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2870 VECTOR_CST_ELT (arg1, i), flags))
2871 return 0;
2872 }
2873 return 1;
2874 }
2875
2876 case COMPLEX_CST:
2877 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2878 flags)
2879 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2880 flags));
2881
2882 case STRING_CST:
2883 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2884 && ! memcmp (TREE_STRING_POINTER (arg0),
2885 TREE_STRING_POINTER (arg1),
2886 TREE_STRING_LENGTH (arg0)));
2887
2888 case ADDR_EXPR:
2889 gcc_checking_assert (!(flags
2890 & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF)));
2891 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2892 flags | OEP_ADDRESS_OF
2893 | OEP_CONSTANT_ADDRESS_OF);
2894 case CONSTRUCTOR:
2895 /* In GIMPLE empty constructors are allowed in initializers of
2896 aggregates. */
2897 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2898 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2899 default:
2900 break;
2901 }
2902
2903 if (flags & OEP_ONLY_CONST)
2904 return 0;
2905
2906 /* Define macros to test an operand from arg0 and arg1 for equality and a
2907 variant that allows null and views null as being different from any
2908 non-null value. In the latter case, if either is null, both
2909 must be; otherwise, do the normal comparison. */
2910 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2911 TREE_OPERAND (arg1, N), flags)
2912
2913 #define OP_SAME_WITH_NULL(N) \
2914 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2915 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2916
2917 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2918 {
2919 case tcc_unary:
2920 /* Two conversions are equal only if signedness and modes match. */
2921 switch (TREE_CODE (arg0))
2922 {
2923 CASE_CONVERT:
2924 case FIX_TRUNC_EXPR:
2925 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2926 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2927 return 0;
2928 break;
2929 default:
2930 break;
2931 }
2932
2933 return OP_SAME (0);
2934
2935
2936 case tcc_comparison:
2937 case tcc_binary:
2938 if (OP_SAME (0) && OP_SAME (1))
2939 return 1;
2940
2941 /* For commutative ops, allow the other order. */
2942 return (commutative_tree_code (TREE_CODE (arg0))
2943 && operand_equal_p (TREE_OPERAND (arg0, 0),
2944 TREE_OPERAND (arg1, 1), flags)
2945 && operand_equal_p (TREE_OPERAND (arg0, 1),
2946 TREE_OPERAND (arg1, 0), flags));
2947
2948 case tcc_reference:
2949 /* If either of the pointer (or reference) expressions we are
2950 dereferencing contain a side effect, these cannot be equal,
2951 but their addresses can be. */
2952 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2953 && (TREE_SIDE_EFFECTS (arg0)
2954 || TREE_SIDE_EFFECTS (arg1)))
2955 return 0;
2956
2957 switch (TREE_CODE (arg0))
2958 {
2959 case INDIRECT_REF:
2960 if (!(flags & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF))
2961 && (TYPE_ALIGN (TREE_TYPE (arg0))
2962 != TYPE_ALIGN (TREE_TYPE (arg1))))
2963 return 0;
2964 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2965 return OP_SAME (0);
2966
2967 case REALPART_EXPR:
2968 case IMAGPART_EXPR:
2969 return OP_SAME (0);
2970
2971 case TARGET_MEM_REF:
2972 case MEM_REF:
2973 if (!(flags & (OEP_ADDRESS_OF | OEP_CONSTANT_ADDRESS_OF)))
2974 {
2975 /* Require equal access sizes. */
2976 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
2977 && (!TYPE_SIZE (TREE_TYPE (arg0))
2978 || !TYPE_SIZE (TREE_TYPE (arg1))
2979 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2980 TYPE_SIZE (TREE_TYPE (arg1)),
2981 flags)))
2982 return 0;
2983 /* Verify that access happens in similar types. */
2984 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2985 return 0;
2986 /* Verify that accesses are TBAA compatible. */
2987 if (flag_strict_aliasing
2988 && (!alias_ptr_types_compatible_p
2989 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2990 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2991 || (MR_DEPENDENCE_CLIQUE (arg0)
2992 != MR_DEPENDENCE_CLIQUE (arg1))
2993 || (MR_DEPENDENCE_BASE (arg0)
2994 != MR_DEPENDENCE_BASE (arg1))))
2995 return 0;
2996 /* Verify that alignment is compatible. */
2997 if (TYPE_ALIGN (TREE_TYPE (arg0))
2998 != TYPE_ALIGN (TREE_TYPE (arg1)))
2999 return 0;
3000 }
3001 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3002 return (OP_SAME (0) && OP_SAME (1)
3003 /* TARGET_MEM_REFs require equal extra operands. */
3004 && (TREE_CODE (arg0) != TARGET_MEM_REF
3005 || (OP_SAME_WITH_NULL (2)
3006 && OP_SAME_WITH_NULL (3)
3007 && OP_SAME_WITH_NULL (4))));
3008
3009 case ARRAY_REF:
3010 case ARRAY_RANGE_REF:
3011 /* Operands 2 and 3 may be null.
3012 Compare the array index by value first if it is constant, as we
3013 may have different types but the same value here. */
3014 if (!OP_SAME (0))
3015 return 0;
3016 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3017 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3018 TREE_OPERAND (arg1, 1))
3019 || OP_SAME (1))
3020 && OP_SAME_WITH_NULL (2)
3021 && OP_SAME_WITH_NULL (3));
3022
3023 case COMPONENT_REF:
3024 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3025 may be NULL when we're called to compare MEM_EXPRs. */
3026 if (!OP_SAME_WITH_NULL (0)
3027 || !OP_SAME (1))
3028 return 0;
3029 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3030 return OP_SAME_WITH_NULL (2);
3031
3032 case BIT_FIELD_REF:
3033 if (!OP_SAME (0))
3034 return 0;
3035 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
3036 return OP_SAME (1) && OP_SAME (2);
3037
3038 default:
3039 return 0;
3040 }
3041
3042 case tcc_expression:
3043 switch (TREE_CODE (arg0))
3044 {
3045 case ADDR_EXPR:
3046 /* Be sure we pass right ADDRESS_OF flag. */
3047 gcc_checking_assert (!(flags
3048 & (OEP_ADDRESS_OF
3049 | OEP_CONSTANT_ADDRESS_OF)));
3050 return operand_equal_p (TREE_OPERAND (arg0, 0),
3051 TREE_OPERAND (arg1, 0),
3052 flags | OEP_ADDRESS_OF);
3053
3054 case TRUTH_NOT_EXPR:
3055 return OP_SAME (0);
3056
3057 case TRUTH_ANDIF_EXPR:
3058 case TRUTH_ORIF_EXPR:
3059 return OP_SAME (0) && OP_SAME (1);
3060
3061 case FMA_EXPR:
3062 case WIDEN_MULT_PLUS_EXPR:
3063 case WIDEN_MULT_MINUS_EXPR:
3064 if (!OP_SAME (2))
3065 return 0;
3066 /* The multiplication operands are commutative. */
3067 /* FALLTHRU */
3068
3069 case TRUTH_AND_EXPR:
3070 case TRUTH_OR_EXPR:
3071 case TRUTH_XOR_EXPR:
3072 if (OP_SAME (0) && OP_SAME (1))
3073 return 1;
3074
3075 /* Otherwise take into account this is a commutative operation. */
3076 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3077 TREE_OPERAND (arg1, 1), flags)
3078 && operand_equal_p (TREE_OPERAND (arg0, 1),
3079 TREE_OPERAND (arg1, 0), flags));
3080
3081 case COND_EXPR:
3082 case VEC_COND_EXPR:
3083 case DOT_PROD_EXPR:
3084 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3085
3086 default:
3087 return 0;
3088 }
3089
3090 case tcc_vl_exp:
3091 switch (TREE_CODE (arg0))
3092 {
3093 case CALL_EXPR:
3094 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3095 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3096 /* If the two CALL_EXPRs are not both internal calls or both
3097 normal function calls, then they are not equal. */
3098 return 0;
3099 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3100 {
3101 /* If the CALL_EXPRs call different internal functions, then they
3102 are not equal. */
3103 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3104 return 0;
3105 }
3106 else
3107 {
3108 /* If the CALL_EXPRs call different functions, then they are not
3109 equal. */
3110 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3111 flags))
3112 return 0;
3113 }
3114
3115 {
3116 unsigned int cef = call_expr_flags (arg0);
3117 if (flags & OEP_PURE_SAME)
3118 cef &= ECF_CONST | ECF_PURE;
3119 else
3120 cef &= ECF_CONST;
3121 if (!cef)
3122 return 0;
3123 }
3124
3125 /* Now see if all the arguments are the same. */
3126 {
3127 const_call_expr_arg_iterator iter0, iter1;
3128 const_tree a0, a1;
3129 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3130 a1 = first_const_call_expr_arg (arg1, &iter1);
3131 a0 && a1;
3132 a0 = next_const_call_expr_arg (&iter0),
3133 a1 = next_const_call_expr_arg (&iter1))
3134 if (! operand_equal_p (a0, a1, flags))
3135 return 0;
3136
3137 /* If we get here and both argument lists are exhausted
3138 then the CALL_EXPRs are equal. */
3139 return ! (a0 || a1);
3140 }
3141 default:
3142 return 0;
3143 }
3144
3145 case tcc_declaration:
3146 /* Consider __builtin_sqrt equal to sqrt. */
3147 return (TREE_CODE (arg0) == FUNCTION_DECL
3148 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3149 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3150 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3151
3152 default:
3153 return 0;
3154 }
3155
3156 #undef OP_SAME
3157 #undef OP_SAME_WITH_NULL
3158 }
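
/* Illustrative sketch, not part of the original source:

     operand_equal_p (a + b, b + a, 0) is nonzero (PLUS_EXPR is
       commutative, handled under tcc_binary above);
     operand_equal_p (f (), f (), 0) is zero unless F is const or,
       with OEP_PURE_SAME, pure;
     comparing the REAL_CSTs 0.0 and -0.0 yields zero whenever signed
       zeros are honored, since the two are distinguishable.  */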
3159 \f
3160 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3161 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3162
3163 When in doubt, return 0. */
3164
3165 static int
3166 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3167 {
3168 int unsignedp1, unsignedpo;
3169 tree primarg0, primarg1, primother;
3170 unsigned int correct_width;
3171
3172 if (operand_equal_p (arg0, arg1, 0))
3173 return 1;
3174
3175 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3176 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3177 return 0;
3178
3179 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3180 and see if the inner values are the same. This removes any
3181 signedness comparison, which doesn't matter here. */
3182 primarg0 = arg0, primarg1 = arg1;
3183 STRIP_NOPS (primarg0);
3184 STRIP_NOPS (primarg1);
3185 if (operand_equal_p (primarg0, primarg1, 0))
3186 return 1;
3187
3188 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3189 actual comparison operand, ARG0.
3190
3191 First throw away any conversions to wider types
3192 already present in the operands. */
3193
3194 primarg1 = get_narrower (arg1, &unsignedp1);
3195 primother = get_narrower (other, &unsignedpo);
3196
3197 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3198 if (unsignedp1 == unsignedpo
3199 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3200 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3201 {
3202 tree type = TREE_TYPE (arg0);
3203
3204 /* Make sure shorter operand is extended the right way
3205 to match the longer operand. */
3206 primarg1 = fold_convert (signed_or_unsigned_type_for
3207 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3208
3209 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3210 return 1;
3211 }
3212
3213 return 0;
3214 }
3215 \f
3216 /* See if ARG is an expression that is either a comparison or is performing
3217 arithmetic on comparisons. The comparisons must only be comparing
3218 two different values, which will be stored in *CVAL1 and *CVAL2; if
3219 they are nonzero it means that some operands have already been found.
3220 No variables may be used anywhere else in the expression except in the
3221 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3222 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3223
3224 If this is true, return 1. Otherwise, return zero. */
3225
3226 static int
3227 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3228 {
3229 enum tree_code code = TREE_CODE (arg);
3230 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3231
3232 /* We can handle some of the tcc_expression cases here. */
3233 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3234 tclass = tcc_unary;
3235 else if (tclass == tcc_expression
3236 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3237 || code == COMPOUND_EXPR))
3238 tclass = tcc_binary;
3239
3240 else if (tclass == tcc_expression && code == SAVE_EXPR
3241 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3242 {
3243 /* If we've already found a CVAL1 or CVAL2, this expression is
3244 too complex to handle. */
3245 if (*cval1 || *cval2)
3246 return 0;
3247
3248 tclass = tcc_unary;
3249 *save_p = 1;
3250 }
3251
3252 switch (tclass)
3253 {
3254 case tcc_unary:
3255 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3256
3257 case tcc_binary:
3258 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3259 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3260 cval1, cval2, save_p));
3261
3262 case tcc_constant:
3263 return 1;
3264
3265 case tcc_expression:
3266 if (code == COND_EXPR)
3267 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3268 cval1, cval2, save_p)
3269 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3270 cval1, cval2, save_p)
3271 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3272 cval1, cval2, save_p));
3273 return 0;
3274
3275 case tcc_comparison:
3276 /* First see if we can handle the first operand, then the second. For
3277 the second operand, we know *CVAL1 can't be zero. Each side of the
3278 comparison must be one of the two values; test for the
3279 case where this isn't true by failing if the two operands
3280 are the same. */
3281
3282 if (operand_equal_p (TREE_OPERAND (arg, 0),
3283 TREE_OPERAND (arg, 1), 0))
3284 return 0;
3285
3286 if (*cval1 == 0)
3287 *cval1 = TREE_OPERAND (arg, 0);
3288 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3289 ;
3290 else if (*cval2 == 0)
3291 *cval2 = TREE_OPERAND (arg, 0);
3292 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3293 ;
3294 else
3295 return 0;
3296
3297 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3298 ;
3299 else if (*cval2 == 0)
3300 *cval2 = TREE_OPERAND (arg, 1);
3301 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3302 ;
3303 else
3304 return 0;
3305
3306 return 1;
3307
3308 default:
3309 return 0;
3310 }
3311 }
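
/* Illustrative sketch, not part of the original source: for
   ARG = (a < b) | (a == b) the walk above records *CVAL1 = a,
   *CVAL2 = b and returns 1, whereas (a < b) | (a < c) fails because a
   third value C shows up once both slots are taken.  */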
3312 \f
3313 /* ARG is a tree that is known to contain just arithmetic operations and
3314 comparisons. Evaluate the operations in the tree substituting NEW0 for
3315 any occurrence of OLD0 as an operand of a comparison and likewise for
3316 NEW1 and OLD1. */
3317
3318 static tree
3319 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3320 tree old1, tree new1)
3321 {
3322 tree type = TREE_TYPE (arg);
3323 enum tree_code code = TREE_CODE (arg);
3324 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3325
3326 /* We can handle some of the tcc_expression cases here. */
3327 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3328 tclass = tcc_unary;
3329 else if (tclass == tcc_expression
3330 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3331 tclass = tcc_binary;
3332
3333 switch (tclass)
3334 {
3335 case tcc_unary:
3336 return fold_build1_loc (loc, code, type,
3337 eval_subst (loc, TREE_OPERAND (arg, 0),
3338 old0, new0, old1, new1));
3339
3340 case tcc_binary:
3341 return fold_build2_loc (loc, code, type,
3342 eval_subst (loc, TREE_OPERAND (arg, 0),
3343 old0, new0, old1, new1),
3344 eval_subst (loc, TREE_OPERAND (arg, 1),
3345 old0, new0, old1, new1));
3346
3347 case tcc_expression:
3348 switch (code)
3349 {
3350 case SAVE_EXPR:
3351 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3352 old1, new1);
3353
3354 case COMPOUND_EXPR:
3355 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3356 old1, new1);
3357
3358 case COND_EXPR:
3359 return fold_build3_loc (loc, code, type,
3360 eval_subst (loc, TREE_OPERAND (arg, 0),
3361 old0, new0, old1, new1),
3362 eval_subst (loc, TREE_OPERAND (arg, 1),
3363 old0, new0, old1, new1),
3364 eval_subst (loc, TREE_OPERAND (arg, 2),
3365 old0, new0, old1, new1));
3366 default:
3367 break;
3368 }
3369 /* Fall through - ??? */
3370
3371 case tcc_comparison:
3372 {
3373 tree arg0 = TREE_OPERAND (arg, 0);
3374 tree arg1 = TREE_OPERAND (arg, 1);
3375
3376 /* We need to check both for exact equality and tree equality. The
3377 former will be true if the operand has a side-effect. In that
3378 case, we know the operand occurred exactly once. */
3379
3380 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3381 arg0 = new0;
3382 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3383 arg0 = new1;
3384
3385 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3386 arg1 = new0;
3387 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3388 arg1 = new1;
3389
3390 return fold_build2_loc (loc, code, type, arg0, arg1);
3391 }
3392
3393 default:
3394 return arg;
3395 }
3396 }
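
/* Illustrative sketch, not part of the original source: given
   ARG = (a <= b) | (a >= b), eval_subst (loc, ARG, a, 0, b, 1)
   rebuilds it as (0 <= 1) | (0 >= 1), which folds to 1.  Together
   with twoval_comparison_p this lets fold evaluate a comparison
   network for each possible ordering of the two values.  */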
3397 \f
3398 /* Return a tree for the case when the result of an expression is RESULT
3399 converted to TYPE and OMITTED was previously an operand of the expression
3400 but is now not needed (e.g., we folded OMITTED * 0).
3401
3402 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3403 the conversion of RESULT to TYPE. */
3404
3405 tree
3406 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3407 {
3408 tree t = fold_convert_loc (loc, type, result);
3409
3410 /* If the resulting operand is an empty statement, just return the omitted
3411 statement cast to void. */
3412 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3413 return build1_loc (loc, NOP_EXPR, void_type_node,
3414 fold_ignored_result (omitted));
3415
3416 if (TREE_SIDE_EFFECTS (omitted))
3417 return build2_loc (loc, COMPOUND_EXPR, type,
3418 fold_ignored_result (omitted), t);
3419
3420 return non_lvalue_loc (loc, t);
3421 }
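
/* Illustrative sketch, not part of the original source: when folding
   f () * 0 the call's side effects must survive, so

     omit_one_operand_loc (loc, integer_type_node,
                           integer_zero_node, call_to_f)

   produces the COMPOUND_EXPR "f (), 0", which evaluates the call and
   then yields the folded constant.  */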
3422
3423 /* Return a tree for the case when the result of an expression is RESULT
3424 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3425 of the expression but are now not needed.
3426
3427 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3428 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3429 evaluated before OMITTED2. Otherwise, if neither has side effects,
3430 just do the conversion of RESULT to TYPE. */
3431
3432 tree
3433 omit_two_operands_loc (location_t loc, tree type, tree result,
3434 tree omitted1, tree omitted2)
3435 {
3436 tree t = fold_convert_loc (loc, type, result);
3437
3438 if (TREE_SIDE_EFFECTS (omitted2))
3439 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3440 if (TREE_SIDE_EFFECTS (omitted1))
3441 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3442
3443 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3444 }
3445
3446 \f
3447 /* Return a simplified tree node for the truth-negation of ARG. This
3448 never alters ARG itself. We assume that ARG is an operation that
3449 returns a truth value (0 or 1).
3450
3451 FIXME: one would think we would fold the result, but it causes
3452 problems with the dominator optimizer. */
3453
3454 static tree
3455 fold_truth_not_expr (location_t loc, tree arg)
3456 {
3457 tree type = TREE_TYPE (arg);
3458 enum tree_code code = TREE_CODE (arg);
3459 location_t loc1, loc2;
3460
3461 /* If this is a comparison, we can simply invert it, except for
3462 floating-point non-equality comparisons, in which case we just
3463 enclose a TRUTH_NOT_EXPR around what we have. */
3464
3465 if (TREE_CODE_CLASS (code) == tcc_comparison)
3466 {
3467 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3468 if (FLOAT_TYPE_P (op_type)
3469 && flag_trapping_math
3470 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3471 && code != NE_EXPR && code != EQ_EXPR)
3472 return NULL_TREE;
3473
3474 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3475 if (code == ERROR_MARK)
3476 return NULL_TREE;
3477
3478 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3479 TREE_OPERAND (arg, 1));
3480 }
3481
3482 switch (code)
3483 {
3484 case INTEGER_CST:
3485 return constant_boolean_node (integer_zerop (arg), type);
3486
3487 case TRUTH_AND_EXPR:
3488 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3489 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3490 return build2_loc (loc, TRUTH_OR_EXPR, type,
3491 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3492 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3493
3494 case TRUTH_OR_EXPR:
3495 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3496 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3497 return build2_loc (loc, TRUTH_AND_EXPR, type,
3498 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3499 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3500
3501 case TRUTH_XOR_EXPR:
3502 /* Here we can invert either operand. We invert the first operand
3503 unless the second operand is a TRUTH_NOT_EXPR, in which case our
3504 result is the XOR of the first operand with the inside of the
3505 negation of the second operand. */
3506
3507 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3508 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3509 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3510 else
3511 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3512 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3513 TREE_OPERAND (arg, 1));
3514
3515 case TRUTH_ANDIF_EXPR:
3516 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3517 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3518 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3519 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3520 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3521
3522 case TRUTH_ORIF_EXPR:
3523 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3524 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3525 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3526 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3527 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3528
3529 case TRUTH_NOT_EXPR:
3530 return TREE_OPERAND (arg, 0);
3531
3532 case COND_EXPR:
3533 {
3534 tree arg1 = TREE_OPERAND (arg, 1);
3535 tree arg2 = TREE_OPERAND (arg, 2);
3536
3537 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3538 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3539
3540 /* A COND_EXPR may have a throw as one operand, which
3541 then has void type. Just leave void operands
3542 as they are. */
3543 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3544 VOID_TYPE_P (TREE_TYPE (arg1))
3545 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3546 VOID_TYPE_P (TREE_TYPE (arg2))
3547 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3548 }
3549
3550 case COMPOUND_EXPR:
3551 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3552 return build2_loc (loc, COMPOUND_EXPR, type,
3553 TREE_OPERAND (arg, 0),
3554 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3555
3556 case NON_LVALUE_EXPR:
3557 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3558 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3559
3560 CASE_CONVERT:
3561 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3562 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3563
3564 /* ... fall through ... */
3565
3566 case FLOAT_EXPR:
3567 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3568 return build1_loc (loc, TREE_CODE (arg), type,
3569 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3570
3571 case BIT_AND_EXPR:
3572 if (!integer_onep (TREE_OPERAND (arg, 1)))
3573 return NULL_TREE;
3574 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3575
3576 case SAVE_EXPR:
3577 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3578
3579 case CLEANUP_POINT_EXPR:
3580 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3581 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3582 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3583
3584 default:
3585 return NULL_TREE;
3586 }
3587 }
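
/* Illustrative sketch, not part of the original source.  The cases
   above push negation inward:

     !(a && b)     ->  !a || !b         (De Morgan)
     !(a < b)      ->  a >= b           (integral operands)
     !(c ? a : b)  ->  c ? !a : !b
     !(x & 1)      ->  (x & 1) == 0

   and NULL_TREE is returned when no safe rewrite exists, e.g. for
   trapping floating-point inequalities.  */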
3588
3589 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3590 assume that ARG is an operation that returns a truth value (0 or 1
3591 for scalars, 0 or -1 for vectors). Return the folded expression if
3592 folding is successful. Otherwise, return NULL_TREE. */
3593
3594 static tree
3595 fold_invert_truthvalue (location_t loc, tree arg)
3596 {
3597 tree type = TREE_TYPE (arg);
3598 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3599 ? BIT_NOT_EXPR
3600 : TRUTH_NOT_EXPR,
3601 type, arg);
3602 }
3603
3604 /* Return a simplified tree node for the truth-negation of ARG. This
3605 never alters ARG itself. We assume that ARG is an operation that
3606 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3607
3608 tree
3609 invert_truthvalue_loc (location_t loc, tree arg)
3610 {
3611 if (TREE_CODE (arg) == ERROR_MARK)
3612 return arg;
3613
3614 tree type = TREE_TYPE (arg);
3615 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3616 ? BIT_NOT_EXPR
3617 : TRUTH_NOT_EXPR,
3618 type, arg);
3619 }
3620
3621 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3622 with code CODE. This optimization is unsafe without -funsafe-math-optimizations. */
3623 static tree
3624 distribute_real_division (location_t loc, enum tree_code code, tree type,
3625 tree arg0, tree arg1)
3626 {
3627 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3628 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3629
3630 /* (A / C) +- (B / C) -> (A +- B) / C. */
3631 if (mul0 == mul1
3632 && operand_equal_p (TREE_OPERAND (arg0, 1),
3633 TREE_OPERAND (arg1, 1), 0))
3634 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3635 fold_build2_loc (loc, code, type,
3636 TREE_OPERAND (arg0, 0),
3637 TREE_OPERAND (arg1, 0)),
3638 TREE_OPERAND (arg0, 1));
3639
3640 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3641 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3642 TREE_OPERAND (arg1, 0), 0)
3643 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3644 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3645 {
3646 REAL_VALUE_TYPE r0, r1;
3647 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3648 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3649 if (!mul0)
3650 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3651 if (!mul1)
3652 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3653 real_arithmetic (&r0, code, &r0, &r1);
3654 return fold_build2_loc (loc, MULT_EXPR, type,
3655 TREE_OPERAND (arg0, 0),
3656 build_real (type, r0));
3657 }
3658
3659 return NULL_TREE;
3660 }
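
/* Illustrative sketch, not part of the original source; both rewrites
   change rounding and so require unsafe math:

     a / c + b / c      ->  (a + b) / c     (one division saved)
     a / 2.0 - a / 4.0  ->  a * 0.25        (1/2 - 1/4 folded now)  */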
3661 \f
3662 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3663 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3664
3665 static tree
3666 make_bit_field_ref (location_t loc, tree inner, tree type,
3667 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3668 {
3669 tree result, bftype;
3670
3671 if (bitpos == 0)
3672 {
3673 tree size = TYPE_SIZE (TREE_TYPE (inner));
3674 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3675 || POINTER_TYPE_P (TREE_TYPE (inner)))
3676 && tree_fits_shwi_p (size)
3677 && tree_to_shwi (size) == bitsize)
3678 return fold_convert_loc (loc, type, inner);
3679 }
3680
3681 bftype = type;
3682 if (TYPE_PRECISION (bftype) != bitsize
3683 || TYPE_UNSIGNED (bftype) == !unsignedp)
3684 bftype = build_nonstandard_integer_type (bitsize, 0);
3685
3686 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3687 size_int (bitsize), bitsize_int (bitpos));
3688
3689 if (bftype != type)
3690 result = fold_convert_loc (loc, type, result);
3691
3692 return result;
3693 }
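
/* For illustration only (hypothetical struct, little-endian layout
   assumed): given

     struct s { unsigned int a : 3; unsigned int b : 9; } x;

   a read of x.b corresponds roughly to BIT_FIELD_REF <x, 9, 3>, i.e.
   extract 9 bits starting at bit 3, which is the kind of node built
   above -- going through an intermediate 9-bit integer type whenever
   TYPE does not already have the wanted precision and signedness.  */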
3694
3695 /* Optimize a bit-field compare.
3696
3697 There are two cases: First is a compare against a constant and the
3698 second is a comparison of two items where the fields are at the same
3699 bit position relative to the start of a chunk (byte, halfword, word)
3700 large enough to contain it. In these cases we can avoid the shift
3701 implicit in bitfield extractions.
3702
3703 For constants, we emit a compare of the shifted constant with the
3704 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3705 compared. For two fields at the same position, we do the ANDs with the
3706 similar mask and compare the result of the ANDs.
3707
3708 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3709 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3710 are the left and right operands of the comparison, respectively.
3711
3712 If the optimization described above can be done, we return the resulting
3713 tree. Otherwise we return zero. */
3714
3715 static tree
3716 optimize_bit_field_compare (location_t loc, enum tree_code code,
3717 tree compare_type, tree lhs, tree rhs)
3718 {
3719 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3720 tree type = TREE_TYPE (lhs);
3721 tree unsigned_type;
3722 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3723 machine_mode lmode, rmode, nmode;
3724 int lunsignedp, runsignedp;
3725 int lvolatilep = 0, rvolatilep = 0;
3726 tree linner, rinner = NULL_TREE;
3727 tree mask;
3728 tree offset;
3729
3730 /* Get all the information about the extractions being done. If the bit size
3731 is the same as the size of the underlying object, we aren't doing an
3732 extraction at all and so can do nothing. We also don't want to
3733 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3734 will then no longer be able to replace it. */
3735 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3736 &lunsignedp, &lvolatilep, false);
3737 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3738 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3739 return 0;
3740
3741 if (!const_p)
3742 {
3743 /* If this is not a constant, we can only do something if bit positions,
3744 sizes, and signedness are the same. */
3745 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3746 &runsignedp, &rvolatilep, false);
3747
3748 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3749 || lunsignedp != runsignedp || offset != 0
3750 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3751 return 0;
3752 }
3753
3754 /* See if we can find a mode to refer to this field. We should be able to,
3755 but fail if we can't. */
3756 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3757 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3758 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3759 TYPE_ALIGN (TREE_TYPE (rinner))),
3760 word_mode, false);
3761 if (nmode == VOIDmode)
3762 return 0;
3763
3764 /* Set signed and unsigned types of the precision of this mode for the
3765 shifts below. */
3766 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3767
3768 /* Compute the bit position and size for the new reference and our offset
3769 within it. If the new reference is the same size as the original, we
3770 won't optimize anything, so return zero. */
3771 nbitsize = GET_MODE_BITSIZE (nmode);
3772 nbitpos = lbitpos & ~ (nbitsize - 1);
3773 lbitpos -= nbitpos;
3774 if (nbitsize == lbitsize)
3775 return 0;
3776
3777 if (BYTES_BIG_ENDIAN)
3778 lbitpos = nbitsize - lbitsize - lbitpos;
3779
3780 /* Make the mask to be used against the extracted field. */
3781 mask = build_int_cst_type (unsigned_type, -1);
3782 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3783 mask = const_binop (RSHIFT_EXPR, mask,
3784 size_int (nbitsize - lbitsize - lbitpos));
3785
3786 if (! const_p)
3787 /* If not comparing with constant, just rework the comparison
3788 and return. */
3789 return fold_build2_loc (loc, code, compare_type,
3790 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3791 make_bit_field_ref (loc, linner,
3792 unsigned_type,
3793 nbitsize, nbitpos,
3794 1),
3795 mask),
3796 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3797 make_bit_field_ref (loc, rinner,
3798 unsigned_type,
3799 nbitsize, nbitpos,
3800 1),
3801 mask));
3802
3803 /* Otherwise, we are handling the constant case. See if the constant is too
3804 big for the field. Warn and return a constant result if so. We do
3805 this not only for its own sake, but to avoid having to test for this
3806 error case below. If we didn't, we might generate wrong code.
3807
3808 For unsigned fields, the constant shifted right by the field length should
3809 be all zero. For signed fields, the high-order bits should agree with
3810 the sign bit. */
3811
3812 if (lunsignedp)
3813 {
3814 if (wi::lrshift (rhs, lbitsize) != 0)
3815 {
3816 warning (0, "comparison is always %d due to width of bit-field",
3817 code == NE_EXPR);
3818 return constant_boolean_node (code == NE_EXPR, compare_type);
3819 }
3820 }
3821 else
3822 {
3823 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3824 if (tem != 0 && tem != -1)
3825 {
3826 warning (0, "comparison is always %d due to width of bit-field",
3827 code == NE_EXPR);
3828 return constant_boolean_node (code == NE_EXPR, compare_type);
3829 }
3830 }
3831
3832 /* Single-bit compares should always be against zero. */
3833 if (lbitsize == 1 && ! integer_zerop (rhs))
3834 {
3835 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3836 rhs = build_int_cst (type, 0);
3837 }
3838
3839 /* Make a new bitfield reference, shift the constant over the
3840 appropriate number of bits, and mask it with the computed mask
3841 (in case this was a signed field), then build the comparison. */
3842 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3843
3844 rhs = const_binop (BIT_AND_EXPR,
3845 const_binop (LSHIFT_EXPR,
3846 fold_convert_loc (loc, unsigned_type, rhs),
3847 size_int (lbitpos)),
3848 mask);
3849
3850 lhs = build2_loc (loc, code, compare_type,
3851 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3852 return lhs;
3853 }
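
/* Sketch of the constant case (illustrative values, little-endian
   32-bit target assumed): for

     struct s { unsigned int pad : 2; unsigned int f : 3; } x;

   the comparison x.f == 5 can fold to something like

     (BIT_FIELD_REF <x, 32, 0> & 0x1c) == 0x14

   i.e. load one word, AND with the field mask 0b111 shifted to bit 2,
   and compare against the constant shifted to the same position; the
   shift a plain bitfield extraction would need disappears.  */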
3854 \f
3855 /* Subroutine for fold_truth_andor_1: decode a field reference.
3856
3857 If EXP is a comparison reference, we return the innermost reference.
3858
3859 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3860 set to the starting bit number.
3861
3862 If the innermost field can be completely contained in a mode-sized
3863 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3864
3865 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3866 otherwise it is not changed.
3867
3868 *PUNSIGNEDP is set to the signedness of the field.
3869
3870 *PMASK is set to the mask used. This is either contained in a
3871 BIT_AND_EXPR or derived from the width of the field.
3872
3873 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3874
3875 Return 0 if this is not a component reference or is one that we can't
3876 do anything with. */
3877
3878 static tree
3879 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3880 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3881 int *punsignedp, int *pvolatilep,
3882 tree *pmask, tree *pand_mask)
3883 {
3884 tree outer_type = 0;
3885 tree and_mask = 0;
3886 tree mask, inner, offset;
3887 tree unsigned_type;
3888 unsigned int precision;
3889
3890 /* All the optimizations using this function assume integer fields.
3891 There are problems with FP fields since the type_for_size call
3892 below can fail for, e.g., XFmode. */
3893 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3894 return 0;
3895
3896 /* We are interested in the bare arrangement of bits, so strip everything
3897 that doesn't affect the machine mode. However, record the type of the
3898 outermost expression if it may matter below. */
3899 if (CONVERT_EXPR_P (exp)
3900 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3901 outer_type = TREE_TYPE (exp);
3902 STRIP_NOPS (exp);
3903
3904 if (TREE_CODE (exp) == BIT_AND_EXPR)
3905 {
3906 and_mask = TREE_OPERAND (exp, 1);
3907 exp = TREE_OPERAND (exp, 0);
3908 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3909 if (TREE_CODE (and_mask) != INTEGER_CST)
3910 return 0;
3911 }
3912
3913 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3914 punsignedp, pvolatilep, false);
3915 if ((inner == exp && and_mask == 0)
3916 || *pbitsize < 0 || offset != 0
3917 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3918 return 0;
3919
3920 /* If the number of bits in the reference is the same as the bitsize of
3921 the outer type, then the outer type gives the signedness. Otherwise
3922 (in case of a small bitfield) the signedness is unchanged. */
3923 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3924 *punsignedp = TYPE_UNSIGNED (outer_type);
3925
3926 /* Compute the mask to access the bitfield. */
3927 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3928 precision = TYPE_PRECISION (unsigned_type);
3929
3930 mask = build_int_cst_type (unsigned_type, -1);
3931
3932 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3933 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3934
3935 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3936 if (and_mask != 0)
3937 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3938 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3939
3940 *pmask = mask;
3941 *pand_mask = and_mask;
3942 return inner;
3943 }
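
/* Mask construction by example (illustrative numbers): for a field of
   *PBITSIZE == 3 bits and a 16-bit unsigned type,

     mask = 0xffff;        /+ all ones in the unsigned type +/
     mask <<= 16 - 3;      /+ 0xe000 +/
     mask >>= 16 - 3;      /+ 0x0007 +/

   leaving exactly three low-order ones; a mask taken from an enclosing
   BIT_AND_EXPR, if any, is then ANDed into it.  */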
3944
3945 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3946 bit positions and the type of MASK is signed. */
3947
3948 static int
3949 all_ones_mask_p (const_tree mask, unsigned int size)
3950 {
3951 tree type = TREE_TYPE (mask);
3952 unsigned int precision = TYPE_PRECISION (type);
3953
3954 /* If this function returns true when the type of the mask is
3955 UNSIGNED, then there will be errors. In particular see
3956 gcc.c-torture/execute/990326-1.c. There does not appear to be
3957 any documentation paper trail as to why this is so. But the pre
3958 wide-int worked with that restriction and it has been preserved
3959 here. */
3960 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3961 return false;
3962
3963 return wi::mask (size, false, precision) == mask;
3964 }
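
/* Example (not from the sources): with SIZE == 8 and MASK of a 32-bit
   signed type, only the value 0x000000ff is accepted; the same mask in
   an unsigned type is rejected outright, per the comment above.  */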
3965
3966 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3967 represents the sign bit of EXP's type. If EXP represents a sign
3968 or zero extension, also test VAL against the unextended type.
3969 The return value is the (sub)expression whose sign bit is VAL,
3970 or NULL_TREE otherwise. */
3971
3972 tree
3973 sign_bit_p (tree exp, const_tree val)
3974 {
3975 int width;
3976 tree t;
3977
3978 /* Tree EXP must have an integral type. */
3979 t = TREE_TYPE (exp);
3980 if (! INTEGRAL_TYPE_P (t))
3981 return NULL_TREE;
3982
3983 /* Tree VAL must be an integer constant. */
3984 if (TREE_CODE (val) != INTEGER_CST
3985 || TREE_OVERFLOW (val))
3986 return NULL_TREE;
3987
3988 width = TYPE_PRECISION (t);
3989 if (wi::only_sign_bit_p (val, width))
3990 return exp;
3991
3992 /* Handle extension from a narrower type. */
3993 if (TREE_CODE (exp) == NOP_EXPR
3994 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3995 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3996
3997 return NULL_TREE;
3998 }
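
/* Example (illustrative): for a 32-bit int EXP, the match succeeds when
   VAL is 0x80000000, the lone bit 31.  If EXP is (int) c with c a
   narrower integer, the recursion retries VAL against the sign bit of
   the unextended type instead.  */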
3999
4000 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4001 to be evaluated unconditionally. */
4002
4003 static int
4004 simple_operand_p (const_tree exp)
4005 {
4006 /* Strip any conversions that don't change the machine mode. */
4007 STRIP_NOPS (exp);
4008
4009 return (CONSTANT_CLASS_P (exp)
4010 || TREE_CODE (exp) == SSA_NAME
4011 || (DECL_P (exp)
4012 && ! TREE_ADDRESSABLE (exp)
4013 && ! TREE_THIS_VOLATILE (exp)
4014 && ! DECL_NONLOCAL (exp)
4015 /* Don't regard global variables as simple. They may be
4016 allocated in ways unknown to the compiler (shared memory,
4017 #pragma weak, etc). */
4018 && ! TREE_PUBLIC (exp)
4019 && ! DECL_EXTERNAL (exp)
4020 /* Weakrefs are not safe to be read, since they can be NULL.
4021 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4022 have DECL_WEAK flag set. */
4023 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4024 /* Loading a static variable is unduly expensive, but global
4025 registers aren't expensive. */
4026 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4027 }
4028
4029 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4030 to be evaluated unconditionally.
4031 In addition to simple_operand_p, we assume that comparisons, conversions,
4032 and logic-not operations are simple, if their operands are simple, too. */
4033
4034 static bool
4035 simple_operand_p_2 (tree exp)
4036 {
4037 enum tree_code code;
4038
4039 if (TREE_SIDE_EFFECTS (exp)
4040 || tree_could_trap_p (exp))
4041 return false;
4042
4043 while (CONVERT_EXPR_P (exp))
4044 exp = TREE_OPERAND (exp, 0);
4045
4046 code = TREE_CODE (exp);
4047
4048 if (TREE_CODE_CLASS (code) == tcc_comparison)
4049 return (simple_operand_p (TREE_OPERAND (exp, 0))
4050 && simple_operand_p (TREE_OPERAND (exp, 1)));
4051
4052 if (code == TRUTH_NOT_EXPR)
4053 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4054
4055 return simple_operand_p (exp);
4056 }
4057
4058 \f
4059 /* The following functions are subroutines to fold_range_test and allow it to
4060 try to change a logical combination of comparisons into a range test.
4061
4062 For example, both
4063 X == 2 || X == 3 || X == 4 || X == 5
4064 and
4065 X >= 2 && X <= 5
4066 are converted to
4067 (unsigned) (X - 2) <= 3
4068
4069 We describe each set of comparisons as being either inside or outside
4070 a range, using a variable named like IN_P, and then describe the
4071 range with a lower and upper bound. If one of the bounds is omitted,
4072 it represents either the highest or lowest value of the type.
4073
4074 In the comments below, we represent a range by two numbers in brackets
4075 preceded by a "+" to designate being inside that range, or a "-" to
4076 designate being outside that range, so the condition can be inverted by
4077 flipping the prefix. An omitted bound is represented by a "-". For
4078 example, "- [-, 10]" means being outside the range starting at the lowest
4079 possible value and ending at 10, in other words, being greater than 10.
4080 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4081 always false.
4082
4083 We set up things so that the missing bounds are handled in a consistent
4084 manner so neither a missing bound nor "true" and "false" need to be
4085 handled using a special case. */
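
/* A side note on why the canonical form is valid (illustrative, not
   from the sources): unsigned subtraction wraps, so

     X == 2 || X == 3 || X == 4 || X == 5      X in [2, 5]
     (unsigned) (X - 2) <= 3                   X - 2 in [0, 3]

   agree everywhere: any X below 2 wraps around to a huge value, and
   any X above 5 exceeds 3, so both forms fail for exactly the same
   inputs.  */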
4086
4087 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4088 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4089 and UPPER1_P are nonzero if the respective argument is an upper bound
4090 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4091 must be specified for a comparison. ARG1 will be converted to ARG0's
4092 type if both are specified. */
4093
4094 static tree
4095 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4096 tree arg1, int upper1_p)
4097 {
4098 tree tem;
4099 int result;
4100 int sgn0, sgn1;
4101
4102 /* If neither arg represents infinity, do the normal operation.
4103 Else, if not a comparison, return infinity. Else handle the special
4104 comparison rules. Note that most of the cases below won't occur, but
4105 are handled for consistency. */
4106
4107 if (arg0 != 0 && arg1 != 0)
4108 {
4109 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4110 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4111 STRIP_NOPS (tem);
4112 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4113 }
4114
4115 if (TREE_CODE_CLASS (code) != tcc_comparison)
4116 return 0;
4117
4118 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4119 for neither. In real maths, we cannot assume open ended ranges are
4120 the same. But, this is computer arithmetic, where numbers are finite.
4121 We can therefore treat a missing bound as a value Z beyond any
4122 representable number (above for upper bounds, below for lower ones).
4123 This permits us to treat unbounded ranges as equal. */
4124 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4125 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4126 switch (code)
4127 {
4128 case EQ_EXPR:
4129 result = sgn0 == sgn1;
4130 break;
4131 case NE_EXPR:
4132 result = sgn0 != sgn1;
4133 break;
4134 case LT_EXPR:
4135 result = sgn0 < sgn1;
4136 break;
4137 case LE_EXPR:
4138 result = sgn0 <= sgn1;
4139 break;
4140 case GT_EXPR:
4141 result = sgn0 > sgn1;
4142 break;
4143 case GE_EXPR:
4144 result = sgn0 >= sgn1;
4145 break;
4146 default:
4147 gcc_unreachable ();
4148 }
4149
4150 return constant_boolean_node (result, type);
4151 }
4152 \f
4153 /* Helper routine for make_range. Perform one step for it, return
4154 new expression if the loop should continue or NULL_TREE if it should
4155 stop. */
4156
4157 tree
4158 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4159 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4160 bool *strict_overflow_p)
4161 {
4162 tree arg0_type = TREE_TYPE (arg0);
4163 tree n_low, n_high, low = *p_low, high = *p_high;
4164 int in_p = *p_in_p, n_in_p;
4165
4166 switch (code)
4167 {
4168 case TRUTH_NOT_EXPR:
4169 /* We can only do something if the range is testing for zero. */
4170 if (low == NULL_TREE || high == NULL_TREE
4171 || ! integer_zerop (low) || ! integer_zerop (high))
4172 return NULL_TREE;
4173 *p_in_p = ! in_p;
4174 return arg0;
4175
4176 case EQ_EXPR: case NE_EXPR:
4177 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4178 /* We can only do something if the range is testing for zero
4179 and if the second operand is an integer constant. Note that
4180 saying something is "in" the range we make is done by
4181 complementing IN_P, whose initial state describes being not
4182 equal to zero; "out" is leaving it alone. */
4183 if (low == NULL_TREE || high == NULL_TREE
4184 || ! integer_zerop (low) || ! integer_zerop (high)
4185 || TREE_CODE (arg1) != INTEGER_CST)
4186 return NULL_TREE;
4187
4188 switch (code)
4189 {
4190 case NE_EXPR: /* - [c, c] */
4191 low = high = arg1;
4192 break;
4193 case EQ_EXPR: /* + [c, c] */
4194 in_p = ! in_p, low = high = arg1;
4195 break;
4196 case GT_EXPR: /* - [-, c] */
4197 low = 0, high = arg1;
4198 break;
4199 case GE_EXPR: /* + [c, -] */
4200 in_p = ! in_p, low = arg1, high = 0;
4201 break;
4202 case LT_EXPR: /* - [c, -] */
4203 low = arg1, high = 0;
4204 break;
4205 case LE_EXPR: /* + [-, c] */
4206 in_p = ! in_p, low = 0, high = arg1;
4207 break;
4208 default:
4209 gcc_unreachable ();
4210 }
4211
4212 /* If this is an unsigned comparison, we also know that EXP is
4213 greater than or equal to zero. We base the range tests we make
4214 on that fact, so we record it here so we can parse existing
4215 range tests. We test arg0_type since often the return type
4216 of, e.g. EQ_EXPR, is boolean. */
4217 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4218 {
4219 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4220 in_p, low, high, 1,
4221 build_int_cst (arg0_type, 0),
4222 NULL_TREE))
4223 return NULL_TREE;
4224
4225 in_p = n_in_p, low = n_low, high = n_high;
4226
4227 /* If the high bound is missing, but we have a nonzero low
4228 bound, reverse the range so it goes from zero to the low bound
4229 minus 1. */
4230 if (high == 0 && low && ! integer_zerop (low))
4231 {
4232 in_p = ! in_p;
4233 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4234 build_int_cst (TREE_TYPE (low), 1), 0);
4235 low = build_int_cst (arg0_type, 0);
4236 }
4237 }
4238
4239 *p_low = low;
4240 *p_high = high;
4241 *p_in_p = in_p;
4242 return arg0;
4243
4244 case NEGATE_EXPR:
4245 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4246 low and high are non-NULL, then normalize will DTRT. */
4247 if (!TYPE_UNSIGNED (arg0_type)
4248 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4249 {
4250 if (low == NULL_TREE)
4251 low = TYPE_MIN_VALUE (arg0_type);
4252 if (high == NULL_TREE)
4253 high = TYPE_MAX_VALUE (arg0_type);
4254 }
4255
4256 /* (-x) IN [a,b] -> x in [-b, -a] */
4257 n_low = range_binop (MINUS_EXPR, exp_type,
4258 build_int_cst (exp_type, 0),
4259 0, high, 1);
4260 n_high = range_binop (MINUS_EXPR, exp_type,
4261 build_int_cst (exp_type, 0),
4262 0, low, 0);
4263 if (n_high != 0 && TREE_OVERFLOW (n_high))
4264 return NULL_TREE;
4265 goto normalize;
4266
4267 case BIT_NOT_EXPR:
4268 /* ~ X -> -X - 1 */
4269 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4270 build_int_cst (exp_type, 1));
4271
4272 case PLUS_EXPR:
4273 case MINUS_EXPR:
4274 if (TREE_CODE (arg1) != INTEGER_CST)
4275 return NULL_TREE;
4276
4277 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4278 move a constant to the other side. */
4279 if (!TYPE_UNSIGNED (arg0_type)
4280 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4281 return NULL_TREE;
4282
4283 /* If EXP is signed, any overflow in the computation is undefined,
4284 so we don't worry about it so long as our computations on
4285 the bounds don't overflow. For unsigned, overflow is defined
4286 and this is exactly the right thing. */
4287 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4288 arg0_type, low, 0, arg1, 0);
4289 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4290 arg0_type, high, 1, arg1, 0);
4291 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4292 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4293 return NULL_TREE;
4294
4295 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4296 *strict_overflow_p = true;
4297
4298 normalize:
4299 /* Check for an unsigned range which has wrapped around the maximum
4300 value thus making n_high < n_low, and normalize it. */
4301 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4302 {
4303 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4304 build_int_cst (TREE_TYPE (n_high), 1), 0);
4305 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4306 build_int_cst (TREE_TYPE (n_low), 1), 0);
4307
4308 /* If the range is of the form +/- [ x+1, x ], we won't
4309 be able to normalize it. But then, it represents the
4310 whole range or the empty set, so make it
4311 +/- [ -, - ]. */
4312 if (tree_int_cst_equal (n_low, low)
4313 && tree_int_cst_equal (n_high, high))
4314 low = high = 0;
4315 else
4316 in_p = ! in_p;
4317 }
4318 else
4319 low = n_low, high = n_high;
4320
4321 *p_low = low;
4322 *p_high = high;
4323 *p_in_p = in_p;
4324 return arg0;
4325
4326 CASE_CONVERT:
4327 case NON_LVALUE_EXPR:
4328 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4329 return NULL_TREE;
4330
4331 if (! INTEGRAL_TYPE_P (arg0_type)
4332 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4333 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4334 return NULL_TREE;
4335
4336 n_low = low, n_high = high;
4337
4338 if (n_low != 0)
4339 n_low = fold_convert_loc (loc, arg0_type, n_low);
4340
4341 if (n_high != 0)
4342 n_high = fold_convert_loc (loc, arg0_type, n_high);
4343
4344 /* If we're converting arg0 from an unsigned type to exp's signed
4345 type, we will be doing the comparison as unsigned.
4346 The tests above have already verified that LOW and HIGH
4347 are both positive.
4348
4349 So we have to ensure that we will handle large unsigned
4350 values the same way that the current signed bounds treat
4351 negative values. */
4352
4353 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4354 {
4355 tree high_positive;
4356 tree equiv_type;
4357 /* For fixed-point modes, we need to pass the saturating flag
4358 as the 2nd parameter. */
4359 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4360 equiv_type
4361 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4362 TYPE_SATURATING (arg0_type));
4363 else
4364 equiv_type
4365 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4366
4367 /* A range without an upper bound is, naturally, unbounded.
4368 Since convert would have cropped a very large value, use
4369 the max value for the destination type. */
4370 high_positive
4371 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4372 : TYPE_MAX_VALUE (arg0_type);
4373
4374 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4375 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4376 fold_convert_loc (loc, arg0_type,
4377 high_positive),
4378 build_int_cst (arg0_type, 1));
4379
4380 /* If the low bound is specified, "and" the range with the
4381 range for which the original unsigned value will be
4382 positive. */
4383 if (low != 0)
4384 {
4385 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4386 1, fold_convert_loc (loc, arg0_type,
4387 integer_zero_node),
4388 high_positive))
4389 return NULL_TREE;
4390
4391 in_p = (n_in_p == in_p);
4392 }
4393 else
4394 {
4395 /* Otherwise, "or" the range with the range of the input
4396 that will be interpreted as negative. */
4397 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4398 1, fold_convert_loc (loc, arg0_type,
4399 integer_zero_node),
4400 high_positive))
4401 return NULL_TREE;
4402
4403 in_p = (in_p != n_in_p);
4404 }
4405 }
4406
4407 *p_low = n_low;
4408 *p_high = n_high;
4409 *p_in_p = in_p;
4410 return arg0;
4411
4412 default:
4413 return NULL_TREE;
4414 }
4415 }
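
/* Example of the normalize step above (illustrative, unsigned char
   arithmetic, i.e. everything mod 256): to turn

     x + 10 in + [5, 250]

   into a range on x, the constant moves across both bounds, giving
   n_low = 5 - 10 = 251 and n_high = 250 - 10 = 240.  Since
   n_high < n_low the range has wrapped, so it is rewritten as the
   complement of the hole: in_p is inverted and the bounds become
   n_high + 1 and n_low - 1, yielding x in - [241, 250].  */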
4416
4417 /* Given EXP, a logical expression, set the range it is testing into
4418 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4419 actually being tested. *PLOW and *PHIGH will be made of the same
4420 type as the returned expression. If EXP is not a comparison, we
4421 will most likely not be returning a useful value and range. Set
4422 *STRICT_OVERFLOW_P to true if the return value is only valid
4423 because signed overflow is undefined; otherwise, do not change
4424 *STRICT_OVERFLOW_P. */
4425
4426 tree
4427 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4428 bool *strict_overflow_p)
4429 {
4430 enum tree_code code;
4431 tree arg0, arg1 = NULL_TREE;
4432 tree exp_type, nexp;
4433 int in_p;
4434 tree low, high;
4435 location_t loc = EXPR_LOCATION (exp);
4436
4437 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4438 and see if we can refine the range. Some of the cases below may not
4439 happen, but it doesn't seem worth worrying about this. We keep
4440 iterating while make_range_step can refine the range; once it
4441 returns NULL_TREE we stop with the range computed so far. */
4442
4443 in_p = 0;
4444 low = high = build_int_cst (TREE_TYPE (exp), 0);
4445
4446 while (1)
4447 {
4448 code = TREE_CODE (exp);
4449 exp_type = TREE_TYPE (exp);
4450 arg0 = NULL_TREE;
4451
4452 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4453 {
4454 if (TREE_OPERAND_LENGTH (exp) > 0)
4455 arg0 = TREE_OPERAND (exp, 0);
4456 if (TREE_CODE_CLASS (code) == tcc_binary
4457 || TREE_CODE_CLASS (code) == tcc_comparison
4458 || (TREE_CODE_CLASS (code) == tcc_expression
4459 && TREE_OPERAND_LENGTH (exp) > 1))
4460 arg1 = TREE_OPERAND (exp, 1);
4461 }
4462 if (arg0 == NULL_TREE)
4463 break;
4464
4465 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4466 &high, &in_p, strict_overflow_p);
4467 if (nexp == NULL_TREE)
4468 break;
4469 exp = nexp;
4470 }
4471
4472 /* If EXP is a constant, we can evaluate whether this is true or false. */
4473 if (TREE_CODE (exp) == INTEGER_CST)
4474 {
4475 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4476 exp, 0, low, 0))
4477 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4478 exp, 1, high, 1)));
4479 low = high = 0;
4480 exp = 0;
4481 }
4482
4483 *pin_p = in_p, *plow = low, *phigh = high;
4484 return exp;
4485 }
4486 \f
4487 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4488 type, TYPE, return an expression to test if EXP is in (or out of, depending
4489 on IN_P) the range. Return 0 if the test couldn't be created. */
4490
4491 tree
4492 build_range_check (location_t loc, tree type, tree exp, int in_p,
4493 tree low, tree high)
4494 {
4495 tree etype = TREE_TYPE (exp), value;
4496
4497 /* Disable this optimization for function pointer expressions
4498 on targets that require function pointer canonicalization. */
4499 if (targetm.have_canonicalize_funcptr_for_compare ()
4500 && TREE_CODE (etype) == POINTER_TYPE
4501 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4502 return NULL_TREE;
4503
4504 if (! in_p)
4505 {
4506 value = build_range_check (loc, type, exp, 1, low, high);
4507 if (value != 0)
4508 return invert_truthvalue_loc (loc, value);
4509
4510 return 0;
4511 }
4512
4513 if (low == 0 && high == 0)
4514 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4515
4516 if (low == 0)
4517 return fold_build2_loc (loc, LE_EXPR, type, exp,
4518 fold_convert_loc (loc, etype, high));
4519
4520 if (high == 0)
4521 return fold_build2_loc (loc, GE_EXPR, type, exp,
4522 fold_convert_loc (loc, etype, low));
4523
4524 if (operand_equal_p (low, high, 0))
4525 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4526 fold_convert_loc (loc, etype, low));
4527
4528 if (integer_zerop (low))
4529 {
4530 if (! TYPE_UNSIGNED (etype))
4531 {
4532 etype = unsigned_type_for (etype);
4533 high = fold_convert_loc (loc, etype, high);
4534 exp = fold_convert_loc (loc, etype, exp);
4535 }
4536 return build_range_check (loc, type, exp, 1, 0, high);
4537 }
4538
4539 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4540 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4541 {
4542 int prec = TYPE_PRECISION (etype);
4543
4544 if (wi::mask (prec - 1, false, prec) == high)
4545 {
4546 if (TYPE_UNSIGNED (etype))
4547 {
4548 tree signed_etype = signed_type_for (etype);
4549 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4550 etype
4551 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4552 else
4553 etype = signed_etype;
4554 exp = fold_convert_loc (loc, etype, exp);
4555 }
4556 return fold_build2_loc (loc, GT_EXPR, type, exp,
4557 build_int_cst (etype, 0));
4558 }
4559 }
4560
4561 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4562 This requires wrap-around arithmetic for the type of the expression.
4563 First make sure that arithmetic in this type is valid, then make sure
4564 that it wraps around. */
4565 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4566 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4567 TYPE_UNSIGNED (etype));
4568
4569 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4570 {
4571 tree utype, minv, maxv;
4572
4573 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4574 for the type in question, as we rely on this here. */
4575 utype = unsigned_type_for (etype);
4576 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4577 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4578 build_int_cst (TREE_TYPE (maxv), 1), 1);
4579 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4580
4581 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4582 minv, 1, maxv, 1)))
4583 etype = utype;
4584 else
4585 return 0;
4586 }
4587
4588 high = fold_convert_loc (loc, etype, high);
4589 low = fold_convert_loc (loc, etype, low);
4590 exp = fold_convert_loc (loc, etype, exp);
4591
4592 value = const_binop (MINUS_EXPR, high, low);
4593 
4595 if (POINTER_TYPE_P (etype))
4596 {
4597 if (value != 0 && !TREE_OVERFLOW (value))
4598 {
4599 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4600 return build_range_check (loc, type,
4601 fold_build_pointer_plus_loc (loc, exp, low),
4602 1, build_int_cst (etype, 0), value);
4603 }
4604 return 0;
4605 }
4606
4607 if (value != 0 && !TREE_OVERFLOW (value))
4608 return build_range_check (loc, type,
4609 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4610 1, build_int_cst (etype, 0), value);
4611
4612 return 0;
4613 }
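
/* Putting it together (an illustrative sketch): a two-sided check of
   c in + [low, high] with constant bounds ends up, after the conversion
   to a wrap-around unsigned type, as

     (utype) (c - low) <= (utype) (high - low)

   so for a plain char c the digit test c >= '0' && c <= '9' becomes
   roughly (unsigned char) (c - '0') <= 9.  The earlier special cases
   peel off one-sided, single-value, [0, N] and [1, signed-max] ranges
   before this general form is used.  */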
4614 \f
4615 /* Return the predecessor of VAL in its type, handling the infinite case. */
4616
4617 static tree
4618 range_predecessor (tree val)
4619 {
4620 tree type = TREE_TYPE (val);
4621
4622 if (INTEGRAL_TYPE_P (type)
4623 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4624 return 0;
4625 else
4626 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4627 build_int_cst (TREE_TYPE (val), 1), 0);
4628 }
4629
4630 /* Return the successor of VAL in its type, handling the infinite case. */
4631
4632 static tree
4633 range_successor (tree val)
4634 {
4635 tree type = TREE_TYPE (val);
4636
4637 if (INTEGRAL_TYPE_P (type)
4638 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4639 return 0;
4640 else
4641 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4642 build_int_cst (TREE_TYPE (val), 1), 0);
4643 }
4644
4645 /* Given two ranges, see if we can merge them into one. Return 1 if we
4646 can, 0 if we can't. Set the output range into the specified parameters. */
4647
4648 bool
4649 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4650 tree high0, int in1_p, tree low1, tree high1)
4651 {
4652 int no_overlap;
4653 int subset;
4654 int temp;
4655 tree tem;
4656 int in_p;
4657 tree low, high;
4658 int lowequal = ((low0 == 0 && low1 == 0)
4659 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4660 low0, 0, low1, 0)));
4661 int highequal = ((high0 == 0 && high1 == 0)
4662 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4663 high0, 1, high1, 1)));
4664
4665 /* Make range 0 be the range that starts first, or ends last if they
4666 start at the same value. Swap them if it isn't. */
4667 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4668 low0, 0, low1, 0))
4669 || (lowequal
4670 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4671 high1, 1, high0, 1))))
4672 {
4673 temp = in0_p, in0_p = in1_p, in1_p = temp;
4674 tem = low0, low0 = low1, low1 = tem;
4675 tem = high0, high0 = high1, high1 = tem;
4676 }
4677
4678 /* Now flag two cases, whether the ranges are disjoint or whether the
4679 second range is totally subsumed in the first. Note that the tests
4680 below are simplified by the ones above. */
4681 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4682 high0, 1, low1, 0));
4683 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4684 high1, 1, high0, 1));
4685
4686 /* We now have four cases, depending on whether we are including or
4687 excluding the two ranges. */
4688 if (in0_p && in1_p)
4689 {
4690 /* If they don't overlap, the result is false. If the second range
4691 is a subset it is the result. Otherwise, the range is from the start
4692 of the second to the end of the first. */
4693 if (no_overlap)
4694 in_p = 0, low = high = 0;
4695 else if (subset)
4696 in_p = 1, low = low1, high = high1;
4697 else
4698 in_p = 1, low = low1, high = high0;
4699 }
4700
4701 else if (in0_p && ! in1_p)
4702 {
4703 /* If they don't overlap, the result is the first range. If they are
4704 equal, the result is false. If the second range is a subset of the
4705 first, and the ranges begin at the same place, we go from just after
4706 the end of the second range to the end of the first. If the second
4707 range is not a subset of the first, or if it is a subset and both
4708 ranges end at the same place, the range starts at the start of the
4709 first range and ends just before the second range.
4710 Otherwise, we can't describe this as a single range. */
4711 if (no_overlap)
4712 in_p = 1, low = low0, high = high0;
4713 else if (lowequal && highequal)
4714 in_p = 0, low = high = 0;
4715 else if (subset && lowequal)
4716 {
4717 low = range_successor (high1);
4718 high = high0;
4719 in_p = 1;
4720 if (low == 0)
4721 {
4722 /* We are in the weird situation where high0 > high1 but
4723 high1 has no successor. Punt. */
4724 return 0;
4725 }
4726 }
4727 else if (! subset || highequal)
4728 {
4729 low = low0;
4730 high = range_predecessor (low1);
4731 in_p = 1;
4732 if (high == 0)
4733 {
4734 /* low0 < low1 but low1 has no predecessor. Punt. */
4735 return 0;
4736 }
4737 }
4738 else
4739 return 0;
4740 }
4741
4742 else if (! in0_p && in1_p)
4743 {
4744 /* If they don't overlap, the result is the second range. If the second
4745 is a subset of the first, the result is false. Otherwise,
4746 the range starts just after the first range and ends at the
4747 end of the second. */
4748 if (no_overlap)
4749 in_p = 1, low = low1, high = high1;
4750 else if (subset || highequal)
4751 in_p = 0, low = high = 0;
4752 else
4753 {
4754 low = range_successor (high0);
4755 high = high1;
4756 in_p = 1;
4757 if (low == 0)
4758 {
4759 /* high1 > high0 but high0 has no successor. Punt. */
4760 return 0;
4761 }
4762 }
4763 }
4764
4765 else
4766 {
4767 /* The case where we are excluding both ranges. Here the complex case
4768 is if they don't overlap. In that case, the only time we have a
4769 range is if they are adjacent. If the second is a subset of the
4770 first, the result is the first. Otherwise, the range to exclude
4771 starts at the beginning of the first range and ends at the end of the
4772 second. */
4773 if (no_overlap)
4774 {
4775 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4776 range_successor (high0),
4777 1, low1, 0)))
4778 in_p = 0, low = low0, high = high1;
4779 else
4780 {
4781 /* Canonicalize - [min, x] into - [-, x]. */
4782 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4783 switch (TREE_CODE (TREE_TYPE (low0)))
4784 {
4785 case ENUMERAL_TYPE:
4786 if (TYPE_PRECISION (TREE_TYPE (low0))
4787 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4788 break;
4789 /* FALLTHROUGH */
4790 case INTEGER_TYPE:
4791 if (tree_int_cst_equal (low0,
4792 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4793 low0 = 0;
4794 break;
4795 case POINTER_TYPE:
4796 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4797 && integer_zerop (low0))
4798 low0 = 0;
4799 break;
4800 default:
4801 break;
4802 }
4803
4804 /* Canonicalize - [x, max] into - [x, -]. */
4805 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4806 switch (TREE_CODE (TREE_TYPE (high1)))
4807 {
4808 case ENUMERAL_TYPE:
4809 if (TYPE_PRECISION (TREE_TYPE (high1))
4810 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4811 break;
4812 /* FALLTHROUGH */
4813 case INTEGER_TYPE:
4814 if (tree_int_cst_equal (high1,
4815 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4816 high1 = 0;
4817 break;
4818 case POINTER_TYPE:
4819 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4820 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4821 high1, 1,
4822 build_int_cst (TREE_TYPE (high1), 1),
4823 1)))
4824 high1 = 0;
4825 break;
4826 default:
4827 break;
4828 }
4829
4830 /* The ranges might be also adjacent between the maximum and
4831 minimum values of the given type. For
4832 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4833 return + [x + 1, y - 1]. */
4834 if (low0 == 0 && high1 == 0)
4835 {
4836 low = range_successor (high0);
4837 high = range_predecessor (low1);
4838 if (low == 0 || high == 0)
4839 return 0;
4840
4841 in_p = 1;
4842 }
4843 else
4844 return 0;
4845 }
4846 }
4847 else if (subset)
4848 in_p = 0, low = low0, high = high0;
4849 else
4850 in_p = 0, low = low0, high = high1;
4851 }
4852
4853 *pin_p = in_p, *plow = low, *phigh = high;
4854 return 1;
4855 }
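
/* Two illustrative merges (not from the sources): intersecting the
   included ranges + [2, 5] and + [6, 9] gives "always false", since
   they do not overlap.  Excluding both, - [2, 5] and - [6, 9], the
   ranges are adjacent (successor (5) == 6), so they merge into the
   single exclusion - [2, 9]; this is how fold_range_test, which
   inverts in0_p/in1_p for an OR, turns
   x in [2, 5] || x in [6, 9] into x in [2, 9].  */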
4856 \f
4857
4858 /* Subroutine of fold, looking inside expressions of the form
4859 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4860 of the COND_EXPR. This function is being used also to optimize
4861 A op B ? C : A, by reversing the comparison first.
4862
4863 Return a folded expression whose code is not a COND_EXPR
4864 anymore, or NULL_TREE if no folding opportunity is found. */
4865
4866 static tree
4867 fold_cond_expr_with_comparison (location_t loc, tree type,
4868 tree arg0, tree arg1, tree arg2)
4869 {
4870 enum tree_code comp_code = TREE_CODE (arg0);
4871 tree arg00 = TREE_OPERAND (arg0, 0);
4872 tree arg01 = TREE_OPERAND (arg0, 1);
4873 tree arg1_type = TREE_TYPE (arg1);
4874 tree tem;
4875
4876 STRIP_NOPS (arg1);
4877 STRIP_NOPS (arg2);
4878
4879 /* If we have A op 0 ? A : -A, consider applying the following
4880 transformations:
4881
4882 A == 0? A : -A same as -A
4883 A != 0? A : -A same as A
4884 A >= 0? A : -A same as abs (A)
4885 A > 0? A : -A same as abs (A)
4886 A <= 0? A : -A same as -abs (A)
4887 A < 0? A : -A same as -abs (A)
4888
4889 None of these transformations work for modes with signed
4890 zeros. If A is +/-0, the first two transformations will
4891 change the sign of the result (from +0 to -0, or vice
4892 versa). The last four will fix the sign of the result,
4893 even though the original expressions could be positive or
4894 negative, depending on the sign of A.
4895
4896 Note that all these transformations are correct if A is
4897 NaN, since the two alternatives (A and -A) are also NaNs. */
4898 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4899 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4900 ? real_zerop (arg01)
4901 : integer_zerop (arg01))
4902 && ((TREE_CODE (arg2) == NEGATE_EXPR
4903 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4904 /* In the case that A is of the form X-Y, '-A' (arg2) may
4905 have already been folded to Y-X, check for that. */
4906 || (TREE_CODE (arg1) == MINUS_EXPR
4907 && TREE_CODE (arg2) == MINUS_EXPR
4908 && operand_equal_p (TREE_OPERAND (arg1, 0),
4909 TREE_OPERAND (arg2, 1), 0)
4910 && operand_equal_p (TREE_OPERAND (arg1, 1),
4911 TREE_OPERAND (arg2, 0), 0))))
4912 switch (comp_code)
4913 {
4914 case EQ_EXPR:
4915 case UNEQ_EXPR:
4916 tem = fold_convert_loc (loc, arg1_type, arg1);
4917 return pedantic_non_lvalue_loc (loc,
4918 fold_convert_loc (loc, type,
4919 negate_expr (tem)));
4920 case NE_EXPR:
4921 case LTGT_EXPR:
4922 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4923 case UNGE_EXPR:
4924 case UNGT_EXPR:
4925 if (flag_trapping_math)
4926 break;
4927 /* Fall through. */
4928 case GE_EXPR:
4929 case GT_EXPR:
4930 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4931 arg1 = fold_convert_loc (loc, signed_type_for
4932 (TREE_TYPE (arg1)), arg1);
4933 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4934 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4935 case UNLE_EXPR:
4936 case UNLT_EXPR:
4937 if (flag_trapping_math)
4938 break;
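/* Fall through. */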
4939 case LE_EXPR:
4940 case LT_EXPR:
4941 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4942 arg1 = fold_convert_loc (loc, signed_type_for
4943 (TREE_TYPE (arg1)), arg1);
4944 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4945 return negate_expr (fold_convert_loc (loc, type, tem));
4946 default:
4947 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4948 break;
4949 }
4950
4951 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4952 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4953 both transformations are correct when A is NaN: A != 0
4954 is then true, and A == 0 is false. */
4955
4956 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4957 && integer_zerop (arg01) && integer_zerop (arg2))
4958 {
4959 if (comp_code == NE_EXPR)
4960 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4961 else if (comp_code == EQ_EXPR)
4962 return build_zero_cst (type);
4963 }
4964
4965 /* Try some transformations of A op B ? A : B.
4966
4967 A == B? A : B same as B
4968 A != B? A : B same as A
4969 A >= B? A : B same as max (A, B)
4970 A > B? A : B same as max (B, A)
4971 A <= B? A : B same as min (A, B)
4972 A < B? A : B same as min (B, A)
4973
4974 As above, these transformations don't work in the presence
4975 of signed zeros. For example, if A and B are zeros of
4976 opposite sign, the first two transformations will change
4977 the sign of the result. In the last four, the original
4978 expressions give different results for (A=+0, B=-0) and
4979 (A=-0, B=+0), but the transformed expressions do not.
4980
4981 The first two transformations are correct if either A or B
4982 is a NaN. In the first transformation, the condition will
4983 be false, and B will indeed be chosen. In the case of the
4984 second transformation, the condition A != B will be true,
4985 and A will be chosen.
4986
4987 The conversions to max() and min() are not correct if B is
4988 a number and A is not. The conditions in the original
4989 expressions will be false, so all four give B. The min()
4990 and max() versions would give a NaN instead. */
4991 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4992 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4993 /* Avoid these transformations if the COND_EXPR may be used
4994 as an lvalue in the C++ front-end. PR c++/19199. */
4995 && (in_gimple_form
4996 || VECTOR_TYPE_P (type)
4997 || (! lang_GNU_CXX ()
4998 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4999 || ! maybe_lvalue_p (arg1)
5000 || ! maybe_lvalue_p (arg2)))
5001 {
5002 tree comp_op0 = arg00;
5003 tree comp_op1 = arg01;
5004 tree comp_type = TREE_TYPE (comp_op0);
5005
5006 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5007 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5008 {
5009 comp_type = type;
5010 comp_op0 = arg1;
5011 comp_op1 = arg2;
5012 }
5013
5014 switch (comp_code)
5015 {
5016 case EQ_EXPR:
5017 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5018 case NE_EXPR:
5019 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5020 case LE_EXPR:
5021 case LT_EXPR:
5022 case UNLE_EXPR:
5023 case UNLT_EXPR:
5024 /* In C++ a ?: expression can be an lvalue, so put the
5025 operand which will be used if they are equal first
5026 so that we can convert this back to the
5027 corresponding COND_EXPR. */
5028 if (!HONOR_NANS (arg1))
5029 {
5030 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5031 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5032 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5033 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5034 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5035 comp_op1, comp_op0);
5036 return pedantic_non_lvalue_loc (loc,
5037 fold_convert_loc (loc, type, tem));
5038 }
5039 break;
5040 case GE_EXPR:
5041 case GT_EXPR:
5042 case UNGE_EXPR:
5043 case UNGT_EXPR:
5044 if (!HONOR_NANS (arg1))
5045 {
5046 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5047 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5048 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5049 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5050 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5051 comp_op1, comp_op0);
5052 return pedantic_non_lvalue_loc (loc,
5053 fold_convert_loc (loc, type, tem));
5054 }
5055 break;
5056 case UNEQ_EXPR:
5057 if (!HONOR_NANS (arg1))
5058 return pedantic_non_lvalue_loc (loc,
5059 fold_convert_loc (loc, type, arg2));
5060 break;
5061 case LTGT_EXPR:
5062 if (!HONOR_NANS (arg1))
5063 return pedantic_non_lvalue_loc (loc,
5064 fold_convert_loc (loc, type, arg1));
5065 break;
5066 default:
5067 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5068 break;
5069 }
5070 }
5071
5072 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5073 we might still be able to simplify this. For example,
5074 if C1 is one less or one more than C2, this might have started
5075 out as a MIN or MAX and been transformed by this function.
5076 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5077
5078 if (INTEGRAL_TYPE_P (type)
5079 && TREE_CODE (arg01) == INTEGER_CST
5080 && TREE_CODE (arg2) == INTEGER_CST)
5081 switch (comp_code)
5082 {
5083 case EQ_EXPR:
5084 if (TREE_CODE (arg1) == INTEGER_CST)
5085 break;
5086 /* We can replace A with C1 in this case. */
5087 arg1 = fold_convert_loc (loc, type, arg01);
5088 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5089
5090 case LT_EXPR:
5091 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5092 MIN_EXPR, to preserve the signedness of the comparison. */
5093 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5094 OEP_ONLY_CONST)
5095 && operand_equal_p (arg01,
5096 const_binop (PLUS_EXPR, arg2,
5097 build_int_cst (type, 1)),
5098 OEP_ONLY_CONST))
5099 {
5100 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5101 fold_convert_loc (loc, TREE_TYPE (arg00),
5102 arg2));
5103 return pedantic_non_lvalue_loc (loc,
5104 fold_convert_loc (loc, type, tem));
5105 }
5106 break;
5107
5108 case LE_EXPR:
5109 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5110 as above. */
5111 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5112 OEP_ONLY_CONST)
5113 && operand_equal_p (arg01,
5114 const_binop (MINUS_EXPR, arg2,
5115 build_int_cst (type, 1)),
5116 OEP_ONLY_CONST))
5117 {
5118 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5119 fold_convert_loc (loc, TREE_TYPE (arg00),
5120 arg2));
5121 return pedantic_non_lvalue_loc (loc,
5122 fold_convert_loc (loc, type, tem));
5123 }
5124 break;
5125
5126 case GT_EXPR:
5127 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5128 MAX_EXPR, to preserve the signedness of the comparison. */
5129 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5130 OEP_ONLY_CONST)
5131 && operand_equal_p (arg01,
5132 const_binop (MINUS_EXPR, arg2,
5133 build_int_cst (type, 1)),
5134 OEP_ONLY_CONST))
5135 {
5136 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5137 fold_convert_loc (loc, TREE_TYPE (arg00),
5138 arg2));
5139 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5140 }
5141 break;
5142
5143 case GE_EXPR:
5144 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5145 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5146 OEP_ONLY_CONST)
5147 && operand_equal_p (arg01,
5148 const_binop (PLUS_EXPR, arg2,
5149 build_int_cst (type, 1)),
5150 OEP_ONLY_CONST))
5151 {
5152 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5153 fold_convert_loc (loc, TREE_TYPE (arg00),
5154 arg2));
5155 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5156 }
5157 break;
5158 case NE_EXPR:
5159 break;
5160 default:
5161 gcc_unreachable ();
5162 }
5163
5164 return NULL_TREE;
5165 }
5166 
5168 \f
5169 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5170 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5171 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5172 false) >= 2)
5173 #endif
5174
5175 /* OP0 and OP1 are boolean tests combined by the logical operation
5176 CODE. See if we can merge them into a range test. Return the new tree if so. */
5177
5178 static tree
5179 fold_range_test (location_t loc, enum tree_code code, tree type,
5180 tree op0, tree op1)
5181 {
5182 int or_op = (code == TRUTH_ORIF_EXPR
5183 || code == TRUTH_OR_EXPR);
5184 int in0_p, in1_p, in_p;
5185 tree low0, low1, low, high0, high1, high;
5186 bool strict_overflow_p = false;
5187 tree tem, lhs, rhs;
5188 const char * const warnmsg = G_("assuming signed overflow does not occur "
5189 "when simplifying range test");
5190
5191 if (!INTEGRAL_TYPE_P (type))
5192 return 0;
5193
5194 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5195 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5196
5197 /* If this is an OR operation, invert both sides; we will invert
5198 again at the end. */
5199 if (or_op)
5200 in0_p = ! in0_p, in1_p = ! in1_p;
5201
5202 /* If both expressions are the same, if we can merge the ranges, and we
5203 can build the range test, return it or it inverted. If one of the
5204 ranges is always true or always false, consider it to be the same
5205 expression as the other. */
5206 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5207 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5208 in1_p, low1, high1)
5209 && 0 != (tem = (build_range_check (loc, type,
5210 lhs != 0 ? lhs
5211 : rhs != 0 ? rhs : integer_zero_node,
5212 in_p, low, high))))
5213 {
5214 if (strict_overflow_p)
5215 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5216 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5217 }
5218
5219 /* On machines where branches are expensive, if this is a
5220 short-circuited branch and the underlying object on both sides
5221 is the same, make a non-short-circuit operation. */
5222 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5223 && lhs != 0 && rhs != 0
5224 && (code == TRUTH_ANDIF_EXPR
5225 || code == TRUTH_ORIF_EXPR)
5226 && operand_equal_p (lhs, rhs, 0))
5227 {
5228 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5229 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5230 which cases we can't do this. */
5231 if (simple_operand_p (lhs))
5232 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5233 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5234 type, op0, op1);
5235
5236 else if (!lang_hooks.decls.global_bindings_p ()
5237 && !CONTAINS_PLACEHOLDER_P (lhs))
5238 {
5239 tree common = save_expr (lhs);
5240
5241 if (0 != (lhs = build_range_check (loc, type, common,
5242 or_op ? ! in0_p : in0_p,
5243 low0, high0))
5244 && (0 != (rhs = build_range_check (loc, type, common,
5245 or_op ? ! in1_p : in1_p,
5246 low1, high1))))
5247 {
5248 if (strict_overflow_p)
5249 fold_overflow_warning (warnmsg,
5250 WARN_STRICT_OVERFLOW_COMPARISON);
5251 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5252 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5253 type, lhs, rhs);
5254 }
5255 }
5256 }
5257
5258 return 0;
5259 }
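
/* Example for the second branch above (illustrative): in
   x == 2 || x == 5 the ranges + [2, 2] and + [5, 5] are neither
   overlapping nor adjacent, so merge_ranges fails, but both sides test
   the same simple operand x; with LOGICAL_OP_NON_SHORT_CIRCUIT the
   TRUTH_ORIF_EXPR is therefore rewritten to an unconditional
   TRUTH_OR_EXPR, trading a branch for an OR.  */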
5260 \f
5261 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5262 bit value. Arrange things so the extra bits will be set to zero if and
5263 only if C is sign-extended to its full width. If MASK is nonzero,
5264 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5265
5266 static tree
5267 unextend (tree c, int p, int unsignedp, tree mask)
5268 {
5269 tree type = TREE_TYPE (c);
5270 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5271 tree temp;
5272
5273 if (p == modesize || unsignedp)
5274 return c;
5275
5276 /* We work by getting just the sign bit into the low-order bit, then
5277 into the high-order bit, then sign-extend. We then XOR that value
5278 with C. */
5279 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5280
5281 /* We must use a signed type in order to get an arithmetic right shift.
5282 However, we must also avoid introducing accidental overflows, so that
5283 a subsequent call to integer_zerop will work. Hence we must
5284 do the type conversion here. At this point, the constant is either
5285 zero or one, and the conversion to a signed type can never overflow.
5286 We could get an overflow if this conversion is done anywhere else. */
5287 if (TYPE_UNSIGNED (type))
5288 temp = fold_convert (signed_type_for (type), temp);
5289
5290 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5291 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5292 if (mask != 0)
5293 temp = const_binop (BIT_AND_EXPR, temp,
5294 fold_convert (TREE_TYPE (c), mask));
5295 /* If necessary, convert the type back to match the type of C. */
5296 if (TYPE_UNSIGNED (type))
5297 temp = fold_convert (type, temp);
5298
5299 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5300 }
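
/* Worked example (illustrative, 32-bit mode, P == 4): for C == 0b1010
   the sign bit (bit 3) is 1, so TEMP becomes 1 << 31 = 0x80000000, and
   the arithmetic right shift by 32 - 4 - 1 = 27 smears it into
   0xfffffff0.  XORing with C gives 0xfffffffa, i.e. the 4-bit value
   sign-extended to -6.  Had bit 3 been clear, TEMP would be 0 and C
   would come back unchanged.  */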
5301 \f
5302 /* For an expression that has the form
5303 (A && B) || ~B
5304 or
5305 (A || B) && ~B,
5306 we can drop one of the inner expressions and simplify to
5307 A || ~B
5308 or
5309 A && ~B
5310 LOC is the location of the resulting expression. OP is the inner
5311 logical operation; the left-hand side in the examples above, while CMPOP
5312 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5313 removing a condition that guards another, as in
5314 (A != NULL && A->...) || A == NULL
5315 which we must not transform. If RHS_ONLY is true, only eliminate the
5316 right-most operand of the inner logical operation. */
5317
5318 static tree
5319 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5320 bool rhs_only)
5321 {
5322 tree type = TREE_TYPE (cmpop);
5323 enum tree_code code = TREE_CODE (cmpop);
5324 enum tree_code truthop_code = TREE_CODE (op);
5325 tree lhs = TREE_OPERAND (op, 0);
5326 tree rhs = TREE_OPERAND (op, 1);
5327 tree orig_lhs = lhs, orig_rhs = rhs;
5328 enum tree_code rhs_code = TREE_CODE (rhs);
5329 enum tree_code lhs_code = TREE_CODE (lhs);
5330 enum tree_code inv_code;
5331
5332 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5333 return NULL_TREE;
5334
5335 if (TREE_CODE_CLASS (code) != tcc_comparison)
5336 return NULL_TREE;
5337
5338 if (rhs_code == truthop_code)
5339 {
5340 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5341 if (newrhs != NULL_TREE)
5342 {
5343 rhs = newrhs;
5344 rhs_code = TREE_CODE (rhs);
5345 }
5346 }
5347 if (lhs_code == truthop_code && !rhs_only)
5348 {
5349 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5350 if (newlhs != NULL_TREE)
5351 {
5352 lhs = newlhs;
5353 lhs_code = TREE_CODE (lhs);
5354 }
5355 }
5356
5357 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5358 if (inv_code == rhs_code
5359 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5360 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5361 return lhs;
5362 if (!rhs_only && inv_code == lhs_code
5363 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5364 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5365 return rhs;
5366 if (rhs != orig_rhs || lhs != orig_lhs)
5367 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5368 lhs, rhs);
5369 return NULL_TREE;
5370 }
5371
5372 /* Find ways of folding logical expressions of LHS and RHS:
5373 Try to merge two comparisons to the same innermost item.
5374 Look for range tests like "ch >= '0' && ch <= '9'".
5375 Look for combinations of simple terms on machines with expensive branches
5376 and evaluate the RHS unconditionally.
5377
5378 For example, if we have p->a == 2 && p->b == 4 and we can make an
5379 object large enough to span both A and B, we can do this with a comparison
5380 against the object ANDed with the a mask.
5381
5382 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5383 operations to do this with one comparison.
5384
5385 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5386 function and the one above.
5387
5388 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5389 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5390
5391 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5392 two operands.
5393
5394 We return the simplified tree or 0 if no optimization is possible. */
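/* A concrete sketch (assuming the hypothetical layout below on a
   little-endian target):

     struct s { unsigned int a : 8; unsigned int b : 8; };

   For "x.a == 2 && x.b == 4" both fields fit in one 16-bit word, so the
   pair of tests can become a single load, mask and compare against the
   merged constant 0x0402.  */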
5395
5396 static tree
5397 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5398 tree lhs, tree rhs)
5399 {
5400 /* If this is the "or" of two comparisons, we can do something if
5401 the comparisons are NE_EXPR. If this is the "and", we can do something
5402 if the comparisons are EQ_EXPR. I.e.,
5403 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5404
5405 WANTED_CODE is the comparison code corresponding to this operation. For single bit fields, we can
5406 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5407 comparison for one-bit fields. */
5408
5409 enum tree_code wanted_code;
5410 enum tree_code lcode, rcode;
5411 tree ll_arg, lr_arg, rl_arg, rr_arg;
5412 tree ll_inner, lr_inner, rl_inner, rr_inner;
5413 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5414 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5415 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5416 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5417 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5418 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5419 machine_mode lnmode, rnmode;
5420 tree ll_mask, lr_mask, rl_mask, rr_mask;
5421 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5422 tree l_const, r_const;
5423 tree lntype, rntype, result;
5424 HOST_WIDE_INT first_bit, end_bit;
5425 int volatilep;
5426
5427 /* Start by getting the comparison codes. Fail if anything is volatile.
5428 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5429 it were surrounded with a NE_EXPR. */
5430
5431 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5432 return 0;
5433
5434 lcode = TREE_CODE (lhs);
5435 rcode = TREE_CODE (rhs);
5436
5437 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5438 {
5439 lhs = build2 (NE_EXPR, truth_type, lhs,
5440 build_int_cst (TREE_TYPE (lhs), 0));
5441 lcode = NE_EXPR;
5442 }
5443
5444 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5445 {
5446 rhs = build2 (NE_EXPR, truth_type, rhs,
5447 build_int_cst (TREE_TYPE (rhs), 0));
5448 rcode = NE_EXPR;
5449 }
5450
5451 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5452 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5453 return 0;
5454
5455 ll_arg = TREE_OPERAND (lhs, 0);
5456 lr_arg = TREE_OPERAND (lhs, 1);
5457 rl_arg = TREE_OPERAND (rhs, 0);
5458 rr_arg = TREE_OPERAND (rhs, 1);
5459
5460 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5461 if (simple_operand_p (ll_arg)
5462 && simple_operand_p (lr_arg))
5463 {
5464 if (operand_equal_p (ll_arg, rl_arg, 0)
5465 && operand_equal_p (lr_arg, rr_arg, 0))
5466 {
5467 result = combine_comparisons (loc, code, lcode, rcode,
5468 truth_type, ll_arg, lr_arg);
5469 if (result)
5470 return result;
5471 }
5472 else if (operand_equal_p (ll_arg, rr_arg, 0)
5473 && operand_equal_p (lr_arg, rl_arg, 0))
5474 {
5475 result = combine_comparisons (loc, code, lcode,
5476 swap_tree_comparison (rcode),
5477 truth_type, ll_arg, lr_arg);
5478 if (result)
5479 return result;
5480 }
5481 }
5482
5483 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5484 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5485
5486 /* If the RHS can be evaluated unconditionally and its operands are
5487 simple, it wins to evaluate the RHS unconditionally on machines
5488 with expensive branches. In this case, this isn't a comparison
5489 that can be merged. */
5490
5491 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5492 false) >= 2
5493 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5494 && simple_operand_p (rl_arg)
5495 && simple_operand_p (rr_arg))
5496 {
5497 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5498 if (code == TRUTH_OR_EXPR
5499 && lcode == NE_EXPR && integer_zerop (lr_arg)
5500 && rcode == NE_EXPR && integer_zerop (rr_arg)
5501 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5502 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5503 return build2_loc (loc, NE_EXPR, truth_type,
5504 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5505 ll_arg, rl_arg),
5506 build_int_cst (TREE_TYPE (ll_arg), 0));
5507
5508 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5509 if (code == TRUTH_AND_EXPR
5510 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5511 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5512 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5513 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5514 return build2_loc (loc, EQ_EXPR, truth_type,
5515 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5516 ll_arg, rl_arg),
5517 build_int_cst (TREE_TYPE (ll_arg), 0));
5518 }
5519
5520 /* See if the comparisons can be merged. Then get all the parameters for
5521 each side. */
5522
5523 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5524 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5525 return 0;
5526
5527 volatilep = 0;
5528 ll_inner = decode_field_reference (loc, ll_arg,
5529 &ll_bitsize, &ll_bitpos, &ll_mode,
5530 &ll_unsignedp, &volatilep, &ll_mask,
5531 &ll_and_mask);
5532 lr_inner = decode_field_reference (loc, lr_arg,
5533 &lr_bitsize, &lr_bitpos, &lr_mode,
5534 &lr_unsignedp, &volatilep, &lr_mask,
5535 &lr_and_mask);
5536 rl_inner = decode_field_reference (loc, rl_arg,
5537 &rl_bitsize, &rl_bitpos, &rl_mode,
5538 &rl_unsignedp, &volatilep, &rl_mask,
5539 &rl_and_mask);
5540 rr_inner = decode_field_reference (loc, rr_arg,
5541 &rr_bitsize, &rr_bitpos, &rr_mode,
5542 &rr_unsignedp, &volatilep, &rr_mask,
5543 &rr_and_mask);
5544
5545 /* The inner operation on the lhs of each comparison must be the same
5546 if we are to be able to do anything.
5547 Then see if we have constants. If not, the same must be true for
5548 the rhs's. */
5549 if (volatilep || ll_inner == 0 || rl_inner == 0
5550 || ! operand_equal_p (ll_inner, rl_inner, 0))
5551 return 0;
5552
5553 if (TREE_CODE (lr_arg) == INTEGER_CST
5554 && TREE_CODE (rr_arg) == INTEGER_CST)
5555 l_const = lr_arg, r_const = rr_arg;
5556 else if (lr_inner == 0 || rr_inner == 0
5557 || ! operand_equal_p (lr_inner, rr_inner, 0))
5558 return 0;
5559 else
5560 l_const = r_const = 0;
5561
5562 /* If either comparison code is not correct for our logical operation,
5563 fail. However, we can convert a one-bit comparison against zero into
5564 the opposite comparison against that bit being set in the field. */
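/* E.g., if B is a single-bit field, "B != 0" is the same test as
   "B == 1", so an "and" of it with an EQ_EXPR comparison can still be
   merged; the code below records the field's bit mask as the constant.  */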
5565
5566 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5567 if (lcode != wanted_code)
5568 {
5569 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5570 {
5571 /* Make the left operand unsigned, since we are only interested
5572 in the value of one bit. Otherwise we are doing the wrong
5573 thing below. */
5574 ll_unsignedp = 1;
5575 l_const = ll_mask;
5576 }
5577 else
5578 return 0;
5579 }
5580
5581 /* This is analogous to the code for l_const above. */
5582 if (rcode != wanted_code)
5583 {
5584 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5585 {
5586 rl_unsignedp = 1;
5587 r_const = rl_mask;
5588 }
5589 else
5590 return 0;
5591 }
5592
5593 /* See if we can find a mode that contains both fields being compared on
5594 the left. If we can't, fail. Otherwise, update all constants and masks
5595 to be relative to a field of that size. */
5596 first_bit = MIN (ll_bitpos, rl_bitpos);
5597 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5598 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5599 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5600 volatilep);
5601 if (lnmode == VOIDmode)
5602 return 0;
5603
5604 lnbitsize = GET_MODE_BITSIZE (lnmode);
5605 lnbitpos = first_bit & ~ (lnbitsize - 1);
5606 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5607 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5608
5609 if (BYTES_BIG_ENDIAN)
5610 {
5611 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5612 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5613 }
5614
5615 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5616 size_int (xll_bitpos));
5617 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5618 size_int (xrl_bitpos));
5619
5620 if (l_const)
5621 {
5622 l_const = fold_convert_loc (loc, lntype, l_const);
5623 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5624 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5625 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5626 fold_build1_loc (loc, BIT_NOT_EXPR,
5627 lntype, ll_mask))))
5628 {
5629 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5630
5631 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5632 }
5633 }
5634 if (r_const)
5635 {
5636 r_const = fold_convert_loc (loc, lntype, r_const);
5637 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5638 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5639 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5640 fold_build1_loc (loc, BIT_NOT_EXPR,
5641 lntype, rl_mask))))
5642 {
5643 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5644
5645 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5646 }
5647 }
5648
5649 /* If the right sides are not constant, do the same for them. Also,
5650 disallow this optimization if a size or signedness mismatch occurs
5651 between the left and right sides. */
5652 if (l_const == 0)
5653 {
5654 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5655 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5656 /* Make sure the two fields on the right
5657 correspond to the left without being swapped. */
5658 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5659 return 0;
5660
5661 first_bit = MIN (lr_bitpos, rr_bitpos);
5662 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5663 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5664 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5665 volatilep);
5666 if (rnmode == VOIDmode)
5667 return 0;
5668
5669 rnbitsize = GET_MODE_BITSIZE (rnmode);
5670 rnbitpos = first_bit & ~ (rnbitsize - 1);
5671 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5672 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5673
5674 if (BYTES_BIG_ENDIAN)
5675 {
5676 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5677 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5678 }
5679
5680 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5681 rntype, lr_mask),
5682 size_int (xlr_bitpos));
5683 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5684 rntype, rr_mask),
5685 size_int (xrr_bitpos));
5686
5687 /* Make a mask that corresponds to both fields being compared.
5688 Do this for both items being compared. If the operands are the
5689 same size and the bits being compared are in the same position
5690 then we can do this by masking both and comparing the masked
5691 results. */
5692 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5693 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5694 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5695 {
5696 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5697 ll_unsignedp || rl_unsignedp);
5698 if (! all_ones_mask_p (ll_mask, lnbitsize))
5699 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5700
5701 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5702 lr_unsignedp || rr_unsignedp);
5703 if (! all_ones_mask_p (lr_mask, rnbitsize))
5704 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5705
5706 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5707 }
5708
5709 /* There is still another way we can do something: If both pairs of
5710 fields being compared are adjacent, we may be able to make a wider
5711 field containing them both.
5712
5713 Note that we still must mask the lhs/rhs expressions. Furthermore,
5714 the mask must be shifted to account for the shift done by
5715 make_bit_field_ref. */
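/* E.g., with adjacent 8-bit fields a and b, "p->a == q->a && p->b == q->b"
   can be done as one 16-bit load and compare on each side (a sketch,
   assuming both pairs really are contiguous in memory).  */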
5716 if ((ll_bitsize + ll_bitpos == rl_bitpos
5717 && lr_bitsize + lr_bitpos == rr_bitpos)
5718 || (ll_bitpos == rl_bitpos + rl_bitsize
5719 && lr_bitpos == rr_bitpos + rr_bitsize))
5720 {
5721 tree type;
5722
5723 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5724 ll_bitsize + rl_bitsize,
5725 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5726 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5727 lr_bitsize + rr_bitsize,
5728 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5729
5730 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5731 size_int (MIN (xll_bitpos, xrl_bitpos)));
5732 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5733 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5734
5735 /* Convert to the smaller type before masking out unwanted bits. */
5736 type = lntype;
5737 if (lntype != rntype)
5738 {
5739 if (lnbitsize > rnbitsize)
5740 {
5741 lhs = fold_convert_loc (loc, rntype, lhs);
5742 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5743 type = rntype;
5744 }
5745 else if (lnbitsize < rnbitsize)
5746 {
5747 rhs = fold_convert_loc (loc, lntype, rhs);
5748 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5749 type = lntype;
5750 }
5751 }
5752
5753 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5754 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5755
5756 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5757 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5758
5759 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5760 }
5761
5762 return 0;
5763 }
5764
5765 /* Handle the case of comparisons with constants. If there is something in
5766 common between the masks, those bits of the constants must be the same.
5767 If not, the condition is always false. Test for this to avoid generating
5768 incorrect code below. */
5769 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5770 if (! integer_zerop (result)
5771 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5772 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5773 {
5774 if (wanted_code == NE_EXPR)
5775 {
5776 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5777 return constant_boolean_node (true, truth_type);
5778 }
5779 else
5780 {
5781 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5782 return constant_boolean_node (false, truth_type);
5783 }
5784 }
5785
5786 /* Construct the expression we will return. First get the component
5787 reference we will make. Unless the mask is all ones the width of
5788 that field, perform the mask operation. Then compare with the
5789 merged constant. */
5790 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5791 ll_unsignedp || rl_unsignedp);
5792
5793 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5794 if (! all_ones_mask_p (ll_mask, lnbitsize))
5795 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5796
5797 return build2_loc (loc, wanted_code, truth_type, result,
5798 const_binop (BIT_IOR_EXPR, l_const, r_const));
5799 }
5800 \f
5801 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5802 constant. */
5803
5804 static tree
5805 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5806 tree op0, tree op1)
5807 {
5808 tree arg0 = op0;
5809 enum tree_code op_code;
5810 tree comp_const;
5811 tree minmax_const;
5812 int consts_equal, consts_lt;
5813 tree inner;
5814
5815 STRIP_SIGN_NOPS (arg0);
5816
5817 op_code = TREE_CODE (arg0);
5818 minmax_const = TREE_OPERAND (arg0, 1);
5819 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5820 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5821 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5822 inner = TREE_OPERAND (arg0, 0);
5823
5824 /* If something does not permit us to optimize, return the original tree. */
5825 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5826 || TREE_CODE (comp_const) != INTEGER_CST
5827 || TREE_OVERFLOW (comp_const)
5828 || TREE_CODE (minmax_const) != INTEGER_CST
5829 || TREE_OVERFLOW (minmax_const))
5830 return NULL_TREE;
5831
5832 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5833 and GT_EXPR, doing the rest with recursive calls using logical
5834 simplifications. */
5835 switch (code)
5836 {
5837 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5838 {
5839 tree tem
5840 = optimize_minmax_comparison (loc,
5841 invert_tree_comparison (code, false),
5842 type, op0, op1);
5843 if (tem)
5844 return invert_truthvalue_loc (loc, tem);
5845 return NULL_TREE;
5846 }
5847
5848 case GE_EXPR:
5849 return
5850 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5851 optimize_minmax_comparison
5852 (loc, EQ_EXPR, type, arg0, comp_const),
5853 optimize_minmax_comparison
5854 (loc, GT_EXPR, type, arg0, comp_const));
5855
5856 case EQ_EXPR:
5857 if (op_code == MAX_EXPR && consts_equal)
5858 /* MAX (X, 0) == 0 -> X <= 0 */
5859 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5860
5861 else if (op_code == MAX_EXPR && consts_lt)
5862 /* MAX (X, 0) == 5 -> X == 5 */
5863 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5864
5865 else if (op_code == MAX_EXPR)
5866 /* MAX (X, 0) == -1 -> false */
5867 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5868
5869 else if (consts_equal)
5870 /* MIN (X, 0) == 0 -> X >= 0 */
5871 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5872
5873 else if (consts_lt)
5874 /* MIN (X, 0) == 5 -> false */
5875 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5876
5877 else
5878 /* MIN (X, 0) == -1 -> X == -1 */
5879 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5880
5881 case GT_EXPR:
5882 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5883 /* MAX (X, 0) > 0 -> X > 0
5884 MAX (X, 0) > 5 -> X > 5 */
5885 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5886
5887 else if (op_code == MAX_EXPR)
5888 /* MAX (X, 0) > -1 -> true */
5889 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5890
5891 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5892 /* MIN (X, 0) > 0 -> false
5893 MIN (X, 0) > 5 -> false */
5894 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5895
5896 else
5897 /* MIN (X, 0) > -1 -> X > -1 */
5898 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5899
5900 default:
5901 return NULL_TREE;
5902 }
5903 }
5904 \f
5905 /* T is an integer expression that is being multiplied or divided, or taken
5906 modulo (CODE says which operation and what kind of divide or modulus), by a
5907 constant C. See if we can eliminate that operation by folding it with
5908 other operations already in T. WIDE_TYPE, if non-null, is a type that
5909 should be used for the computation if wider than our type.
5910
5911 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5912 (X * 2) + (Y * 4). We must, however, be assured that either the original
5913 expression would not overflow or that overflow is undefined for the type
5914 in the language in question.
5915
5916 If we return a non-null expression, it is an equivalent form of the
5917 original computation, but need not be in the original type.
5918
5919 We set *STRICT_OVERFLOW_P to true if the return value depends on
5920 signed overflow being undefined. Otherwise we do not change
5921 *STRICT_OVERFLOW_P. */
5922
5923 static tree
5924 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5925 bool *strict_overflow_p)
5926 {
5927 /* To avoid exponential search depth, refuse to allow recursion past
5928 three levels. Beyond that (1) it's highly unlikely that we'll find
5929 something interesting and (2) we've probably processed it before
5930 when we built the inner expression. */
5931
5932 static int depth;
5933 tree ret;
5934
5935 if (depth > 3)
5936 return NULL;
5937
5938 depth++;
5939 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5940 depth--;
5941
5942 return ret;
5943 }
5944
5945 static tree
5946 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5947 bool *strict_overflow_p)
5948 {
5949 tree type = TREE_TYPE (t);
5950 enum tree_code tcode = TREE_CODE (t);
5951 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5952 > GET_MODE_SIZE (TYPE_MODE (type)))
5953 ? wide_type : type);
5954 tree t1, t2;
5955 int same_p = tcode == code;
5956 tree op0 = NULL_TREE, op1 = NULL_TREE;
5957 bool sub_strict_overflow_p;
5958
5959 /* Don't deal with constants of zero here; they confuse the code below. */
5960 if (integer_zerop (c))
5961 return NULL_TREE;
5962
5963 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5964 op0 = TREE_OPERAND (t, 0);
5965
5966 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5967 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5968
5969 /* Note that we need not handle conditional operations here since fold
5970 already handles those cases. So just do arithmetic here. */
5971 switch (tcode)
5972 {
5973 case INTEGER_CST:
5974 /* For a constant, we can always simplify if we are a multiply
5975 or (for divide and modulus) if it is a multiple of our constant. */
5976 if (code == MULT_EXPR
5977 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5978 return const_binop (code, fold_convert (ctype, t),
5979 fold_convert (ctype, c));
5980 break;
5981
5982 CASE_CONVERT: case NON_LVALUE_EXPR:
5983 /* If op0 is an expression ... */
5984 if ((COMPARISON_CLASS_P (op0)
5985 || UNARY_CLASS_P (op0)
5986 || BINARY_CLASS_P (op0)
5987 || VL_EXP_CLASS_P (op0)
5988 || EXPRESSION_CLASS_P (op0))
5989 /* ... and has wrapping overflow, and its type is smaller
5990 than ctype, then we cannot pass through as widening. */
5991 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5992 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5993 && (TYPE_PRECISION (ctype)
5994 > TYPE_PRECISION (TREE_TYPE (op0))))
5995 /* ... or this is a truncation (t is narrower than op0),
5996 then we cannot pass through this narrowing. */
5997 || (TYPE_PRECISION (type)
5998 < TYPE_PRECISION (TREE_TYPE (op0)))
5999 /* ... or signedness changes for division or modulus,
6000 then we cannot pass through this conversion. */
6001 || (code != MULT_EXPR
6002 && (TYPE_UNSIGNED (ctype)
6003 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6004 /* ... or has undefined overflow while the converted to
6005 type has not, we cannot do the operation in the inner type
6006 as that would introduce undefined overflow. */
6007 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6008 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6009 && !TYPE_OVERFLOW_UNDEFINED (type))))
6010 break;
6011
6012 /* Pass the constant down and see if we can make a simplification. If
6013 we can, replace this expression with the inner simplification for
6014 possible later conversion to our or some other type. */
6015 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6016 && TREE_CODE (t2) == INTEGER_CST
6017 && !TREE_OVERFLOW (t2)
6018 && (0 != (t1 = extract_muldiv (op0, t2, code,
6019 code == MULT_EXPR
6020 ? ctype : NULL_TREE,
6021 strict_overflow_p))))
6022 return t1;
6023 break;
6024
6025 case ABS_EXPR:
6026 /* If widening the type changes it from signed to unsigned, then we
6027 must avoid building ABS_EXPR itself as unsigned. */
6028 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6029 {
6030 tree cstype = (*signed_type_for) (ctype);
6031 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6032 != 0)
6033 {
6034 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6035 return fold_convert (ctype, t1);
6036 }
6037 break;
6038 }
6039 /* If the constant is negative, we cannot simplify this. */
6040 if (tree_int_cst_sgn (c) == -1)
6041 break;
6042 /* FALLTHROUGH */
6043 case NEGATE_EXPR:
6044 /* For division and modulus, type can't be unsigned, as e.g.
6045 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6046 For signed types, even with wrapping overflow, this is fine. */
6047 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6048 break;
6049 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6050 != 0)
6051 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6052 break;
6053
6054 case MIN_EXPR: case MAX_EXPR:
6055 /* If widening the type changes the signedness, then we can't perform
6056 this optimization as that changes the result. */
6057 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6058 break;
6059
6060 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6061 sub_strict_overflow_p = false;
6062 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6063 &sub_strict_overflow_p)) != 0
6064 && (t2 = extract_muldiv (op1, c, code, wide_type,
6065 &sub_strict_overflow_p)) != 0)
6066 {
6067 if (tree_int_cst_sgn (c) < 0)
6068 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6069 if (sub_strict_overflow_p)
6070 *strict_overflow_p = true;
6071 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6072 fold_convert (ctype, t2));
6073 }
6074 break;
6075
6076 case LSHIFT_EXPR: case RSHIFT_EXPR:
6077 /* If the second operand is constant, this is a multiplication
6078 or floor division by a power of two, so we can treat it that
6079 way unless the multiplier or divisor overflows. Signed
6080 left-shift overflow is implementation-defined rather than
6081 undefined in C90, so do not convert signed left shift into
6082 multiplication. */
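/* A sketch: for unsigned X, "X << 3" is recast as X * 8 and "X >> 3"
   as X floor_div 8, and the rewritten tree is fed back through
   extract_muldiv.  */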
6083 if (TREE_CODE (op1) == INTEGER_CST
6084 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6085 /* const_binop may not detect overflow correctly,
6086 so check for it explicitly here. */
6087 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6088 && 0 != (t1 = fold_convert (ctype,
6089 const_binop (LSHIFT_EXPR,
6090 size_one_node,
6091 op1)))
6092 && !TREE_OVERFLOW (t1))
6093 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6094 ? MULT_EXPR : FLOOR_DIV_EXPR,
6095 ctype,
6096 fold_convert (ctype, op0),
6097 t1),
6098 c, code, wide_type, strict_overflow_p);
6099 break;
6100
6101 case PLUS_EXPR: case MINUS_EXPR:
6102 /* See if we can eliminate the operation on both sides. If we can, we
6103 can return a new PLUS or MINUS. If we can't, the only remaining
6104 cases where we can do anything are if the second operand is a
6105 constant. */
6106 sub_strict_overflow_p = false;
6107 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6108 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6109 if (t1 != 0 && t2 != 0
6110 && (code == MULT_EXPR
6111 /* If not multiplication, we can only do this if both operands
6112 are divisible by c. */
6113 || (multiple_of_p (ctype, op0, c)
6114 && multiple_of_p (ctype, op1, c))))
6115 {
6116 if (sub_strict_overflow_p)
6117 *strict_overflow_p = true;
6118 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6119 fold_convert (ctype, t2));
6120 }
6121
6122 /* If this was a subtraction, negate OP1 and set it to be an addition.
6123 This simplifies the logic below. */
6124 if (tcode == MINUS_EXPR)
6125 {
6126 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6127 /* If OP1 was not easily negatable, the constant may be OP0. */
6128 if (TREE_CODE (op0) == INTEGER_CST)
6129 {
6130 std::swap (op0, op1);
6131 std::swap (t1, t2);
6132 }
6133 }
6134
6135 if (TREE_CODE (op1) != INTEGER_CST)
6136 break;
6137
6138 /* If either OP1 or C is negative, this optimization is not safe for
6139 some of the division and remainder types, while for others we need
6140 to change the code. */
6141 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6142 {
6143 if (code == CEIL_DIV_EXPR)
6144 code = FLOOR_DIV_EXPR;
6145 else if (code == FLOOR_DIV_EXPR)
6146 code = CEIL_DIV_EXPR;
6147 else if (code != MULT_EXPR
6148 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6149 break;
6150 }
6151
6152 /* If it's a multiply or a division/modulus operation of a multiple
6153 of our constant, do the operation and verify it doesn't overflow. */
6154 if (code == MULT_EXPR
6155 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6156 {
6157 op1 = const_binop (code, fold_convert (ctype, op1),
6158 fold_convert (ctype, c));
6159 /* We allow the constant to overflow with wrapping semantics. */
6160 if (op1 == 0
6161 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6162 break;
6163 }
6164 else
6165 break;
6166
6167 /* If we have an unsigned type, we cannot widen the operation since it
6168 will change the result if the original computation overflowed. */
6169 if (TYPE_UNSIGNED (ctype) && ctype != type)
6170 break;
6171
6172 /* If we were able to eliminate our operation from the first side,
6173 apply our operation to the second side and reform the PLUS. */
6174 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6175 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6176
6177 /* The last case is if we are a multiply. In that case, we can
6178 apply the distributive law to commute the multiply and addition
6179 if the multiplication of the constants doesn't overflow
6180 and overflow is defined. With undefined overflow
6181 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6182 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6183 return fold_build2 (tcode, ctype,
6184 fold_build2 (code, ctype,
6185 fold_convert (ctype, op0),
6186 fold_convert (ctype, c)),
6187 op1);
6188
6189 break;
6190
6191 case MULT_EXPR:
6192 /* We have a special case here if we are doing something like
6193 (C * 8) % 4 since we know that's zero. */
6194 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6195 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6196 /* If the multiplication can overflow we cannot optimize this. */
6197 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6198 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6199 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6200 {
6201 *strict_overflow_p = true;
6202 return omit_one_operand (type, integer_zero_node, op0);
6203 }
6204
6205 /* ... fall through ... */
6206
6207 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6208 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6209 /* If we can extract our operation from the LHS, do so and return a
6210 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6211 do something only if the second operand is a constant. */
6212 if (same_p
6213 && (t1 = extract_muldiv (op0, c, code, wide_type,
6214 strict_overflow_p)) != 0)
6215 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6216 fold_convert (ctype, op1));
6217 else if (tcode == MULT_EXPR && code == MULT_EXPR
6218 && (t1 = extract_muldiv (op1, c, code, wide_type,
6219 strict_overflow_p)) != 0)
6220 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6221 fold_convert (ctype, t1));
6222 else if (TREE_CODE (op1) != INTEGER_CST)
6223 return 0;
6224
6225 /* If these are the same operation types, we can associate them
6226 assuming no overflow. */
6227 if (tcode == code)
6228 {
6229 bool overflow_p = false;
6230 bool overflow_mul_p;
6231 signop sign = TYPE_SIGN (ctype);
6232 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6233 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6234 if (overflow_mul_p
6235 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6236 overflow_p = true;
6237 if (!overflow_p)
6238 {
6239 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6240 TYPE_SIGN (TREE_TYPE (op1)));
6241 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6242 wide_int_to_tree (ctype, mul));
6243 }
6244 }
6245
6246 /* If these operations "cancel" each other, we have the main
6247 optimizations of this pass, which occur when either constant is a
6248 multiple of the other, in which case we replace this with an
6249 operation of either CODE or TCODE.
6250
6251 If we have an unsigned type, we cannot do this since it will change
6252 the result if the original computation overflowed. */
6253 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6254 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6255 || (tcode == MULT_EXPR
6256 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6257 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6258 && code != MULT_EXPR)))
6259 {
6260 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6261 {
6262 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6263 *strict_overflow_p = true;
6264 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6265 fold_convert (ctype,
6266 const_binop (TRUNC_DIV_EXPR,
6267 op1, c)));
6268 }
6269 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6270 {
6271 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6272 *strict_overflow_p = true;
6273 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6274 fold_convert (ctype,
6275 const_binop (TRUNC_DIV_EXPR,
6276 c, op1)));
6277 }
6278 }
6279 break;
6280
6281 default:
6282 break;
6283 }
6284
6285 return 0;
6286 }
6287 \f
6288 /* Return a node which has the indicated constant VALUE (either 0 or
6289 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6290 and is of the indicated TYPE. */
6291
6292 tree
6293 constant_boolean_node (bool value, tree type)
6294 {
6295 if (type == integer_type_node)
6296 return value ? integer_one_node : integer_zero_node;
6297 else if (type == boolean_type_node)
6298 return value ? boolean_true_node : boolean_false_node;
6299 else if (TREE_CODE (type) == VECTOR_TYPE)
6300 return build_vector_from_val (type,
6301 build_int_cst (TREE_TYPE (type),
6302 value ? -1 : 0));
6303 else
6304 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6305 }
6306
6307
6308 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6309 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6310 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6311 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6312 COND is the first argument to CODE; otherwise (as in the example
6313 given here), it is the second argument. TYPE is the type of the
6314 original expression. Return NULL_TREE if no simplification is
6315 possible. */
6316
6317 static tree
6318 fold_binary_op_with_conditional_arg (location_t loc,
6319 enum tree_code code,
6320 tree type, tree op0, tree op1,
6321 tree cond, tree arg, int cond_first_p)
6322 {
6323 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6324 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6325 tree test, true_value, false_value;
6326 tree lhs = NULL_TREE;
6327 tree rhs = NULL_TREE;
6328 enum tree_code cond_code = COND_EXPR;
6329
6330 if (TREE_CODE (cond) == COND_EXPR
6331 || TREE_CODE (cond) == VEC_COND_EXPR)
6332 {
6333 test = TREE_OPERAND (cond, 0);
6334 true_value = TREE_OPERAND (cond, 1);
6335 false_value = TREE_OPERAND (cond, 2);
6336 /* If this operand throws an exception (and hence has void type),
6337 it does not make sense to try to perform a logical or arithmetic
6338 operation involving it. */
6339 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6340 lhs = true_value;
6341 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6342 rhs = false_value;
6343 }
6344 else
6345 {
6346 tree testtype = TREE_TYPE (cond);
6347 test = cond;
6348 true_value = constant_boolean_node (true, testtype);
6349 false_value = constant_boolean_node (false, testtype);
6350 }
6351
6352 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6353 cond_code = VEC_COND_EXPR;
6354
6355 /* This transformation is only worthwhile if we don't have to wrap ARG
6356 in a SAVE_EXPR and the operation can be simplified without recursing
6357 on at least one of the branches once it's pushed inside the COND_EXPR. */
6358 if (!TREE_CONSTANT (arg)
6359 && (TREE_SIDE_EFFECTS (arg)
6360 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6361 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6362 return NULL_TREE;
6363
6364 arg = fold_convert_loc (loc, arg_type, arg);
6365 if (lhs == 0)
6366 {
6367 true_value = fold_convert_loc (loc, cond_type, true_value);
6368 if (cond_first_p)
6369 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6370 else
6371 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6372 }
6373 if (rhs == 0)
6374 {
6375 false_value = fold_convert_loc (loc, cond_type, false_value);
6376 if (cond_first_p)
6377 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6378 else
6379 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6380 }
6381
6382 /* Check that we have simplified at least one of the branches. */
6383 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6384 return NULL_TREE;
6385
6386 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6387 }
6388
6389 \f
6390 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6391
6392 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6393 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6394 ADDEND is the same as X.
6395
6396 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6397 and finite. The problematic cases are when X is zero, and its mode
6398 has signed zeros. In the case of rounding towards -infinity,
6399 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6400 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6401
6402 bool
6403 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6404 {
6405 if (!real_zerop (addend))
6406 return false;
6407
6408 /* Don't allow the fold with -fsignaling-nans. */
6409 if (HONOR_SNANS (element_mode (type)))
6410 return false;
6411
6412 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6413 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6414 return true;
6415
6416 /* In a vector or complex, we would need to check the sign of all zeros. */
6417 if (TREE_CODE (addend) != REAL_CST)
6418 return false;
6419
6420 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6421 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6422 negate = !negate;
6423
6424 /* The mode has signed zeros, and we have to honor their sign.
6425 In this situation, there is only one case we can return true for.
6426 X - 0 is the same as X unless rounding towards -infinity is
6427 supported. */
6428 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6429 }
6430
6431 /* Subroutine of fold() that optimizes comparisons of a division by
6432 a nonzero integer constant against an integer constant, i.e.
6433 X/C1 op C2.
6434
6435 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6436 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6437 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6438
6439 The function returns the constant folded tree if a simplification
6440 can be made, and NULL_TREE otherwise. */
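/* For example, with unsigned X, "X / 3 == 2" holds exactly for X in
   [6, 8] (prod == 6, hi == prod + 2 == 8), so the comparison becomes
   the range check 6 <= X && X <= 8.  */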
6441
6442 static tree
6443 fold_div_compare (location_t loc,
6444 enum tree_code code, tree type, tree arg0, tree arg1)
6445 {
6446 tree prod, tmp, hi, lo;
6447 tree arg00 = TREE_OPERAND (arg0, 0);
6448 tree arg01 = TREE_OPERAND (arg0, 1);
6449 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6450 bool neg_overflow = false;
6451 bool overflow;
6452
6453 /* We have to do this the hard way to detect unsigned overflow.
6454 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6455 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6456 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6457 neg_overflow = false;
6458
6459 if (sign == UNSIGNED)
6460 {
6461 tmp = int_const_binop (MINUS_EXPR, arg01,
6462 build_int_cst (TREE_TYPE (arg01), 1));
6463 lo = prod;
6464
6465 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6466 val = wi::add (prod, tmp, sign, &overflow);
6467 hi = force_fit_type (TREE_TYPE (arg00), val,
6468 -1, overflow | TREE_OVERFLOW (prod));
6469 }
6470 else if (tree_int_cst_sgn (arg01) >= 0)
6471 {
6472 tmp = int_const_binop (MINUS_EXPR, arg01,
6473 build_int_cst (TREE_TYPE (arg01), 1));
6474 switch (tree_int_cst_sgn (arg1))
6475 {
6476 case -1:
6477 neg_overflow = true;
6478 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6479 hi = prod;
6480 break;
6481
6482 case 0:
6483 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6484 hi = tmp;
6485 break;
6486
6487 case 1:
6488 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6489 lo = prod;
6490 break;
6491
6492 default:
6493 gcc_unreachable ();
6494 }
6495 }
6496 else
6497 {
6498 /* A negative divisor reverses the relational operators. */
6499 code = swap_tree_comparison (code);
6500
6501 tmp = int_const_binop (PLUS_EXPR, arg01,
6502 build_int_cst (TREE_TYPE (arg01), 1));
6503 switch (tree_int_cst_sgn (arg1))
6504 {
6505 case -1:
6506 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6507 lo = prod;
6508 break;
6509
6510 case 0:
6511 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6512 lo = tmp;
6513 break;
6514
6515 case 1:
6516 neg_overflow = true;
6517 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6518 hi = prod;
6519 break;
6520
6521 default:
6522 gcc_unreachable ();
6523 }
6524 }
6525
6526 switch (code)
6527 {
6528 case EQ_EXPR:
6529 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6530 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6531 if (TREE_OVERFLOW (hi))
6532 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6533 if (TREE_OVERFLOW (lo))
6534 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6535 return build_range_check (loc, type, arg00, 1, lo, hi);
6536
6537 case NE_EXPR:
6538 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6539 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6540 if (TREE_OVERFLOW (hi))
6541 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6542 if (TREE_OVERFLOW (lo))
6543 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6544 return build_range_check (loc, type, arg00, 0, lo, hi);
6545
6546 case LT_EXPR:
6547 if (TREE_OVERFLOW (lo))
6548 {
6549 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6550 return omit_one_operand_loc (loc, type, tmp, arg00);
6551 }
6552 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6553
6554 case LE_EXPR:
6555 if (TREE_OVERFLOW (hi))
6556 {
6557 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6558 return omit_one_operand_loc (loc, type, tmp, arg00);
6559 }
6560 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6561
6562 case GT_EXPR:
6563 if (TREE_OVERFLOW (hi))
6564 {
6565 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6566 return omit_one_operand_loc (loc, type, tmp, arg00);
6567 }
6568 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6569
6570 case GE_EXPR:
6571 if (TREE_OVERFLOW (lo))
6572 {
6573 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6574 return omit_one_operand_loc (loc, type, tmp, arg00);
6575 }
6576 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6577
6578 default:
6579 break;
6580 }
6581
6582 return NULL_TREE;
6583 }
6584
6585
6586 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6587 equality/inequality test, then return a simplified form of the test
6588 using a sign test. Otherwise return NULL. TYPE is the desired
6589 result type. */
6590
6591 static tree
6592 fold_single_bit_test_into_sign_test (location_t loc,
6593 enum tree_code code, tree arg0, tree arg1,
6594 tree result_type)
6595 {
6596 /* If this is testing a single bit, we can optimize the test. */
6597 if ((code == NE_EXPR || code == EQ_EXPR)
6598 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6599 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6600 {
6601 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6602 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6603 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6604
6605 if (arg00 != NULL_TREE
6606 /* This is only a win if casting to a signed type is cheap,
6607 i.e. when arg00's type is not a partial mode. */
6608 && TYPE_PRECISION (TREE_TYPE (arg00))
6609 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6610 {
6611 tree stype = signed_type_for (TREE_TYPE (arg00));
6612 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6613 result_type,
6614 fold_convert_loc (loc, stype, arg00),
6615 build_int_cst (stype, 0));
6616 }
6617 }
6618
6619 return NULL_TREE;
6620 }
6621
6622 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6623 equality/inequality test, then return a simplified form of
6624 the test using shifts and logical operations. Otherwise return
6625 NULL. TYPE is the desired result type. */
6626
6627 tree
6628 fold_single_bit_test (location_t loc, enum tree_code code,
6629 tree arg0, tree arg1, tree result_type)
6630 {
6631 /* If this is testing a single bit, we can optimize the test. */
6632 if ((code == NE_EXPR || code == EQ_EXPR)
6633 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6634 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6635 {
6636 tree inner = TREE_OPERAND (arg0, 0);
6637 tree type = TREE_TYPE (arg0);
6638 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6639 machine_mode operand_mode = TYPE_MODE (type);
6640 int ops_unsigned;
6641 tree signed_type, unsigned_type, intermediate_type;
6642 tree tem, one;
6643
6644 /* First, see if we can fold the single bit test into a sign-bit
6645 test. */
6646 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6647 result_type);
6648 if (tem)
6649 return tem;
6650
6651 /* Otherwise we have (A & C) != 0 where C is a single bit,
6652 convert that into ((A >> C2) & 1), where C2 = log2(C).
6653 Similarly for (A & C) == 0. */
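/* E.g., "(A & 8) != 0" becomes "((unsigned) A >> 3) & 1", and
   "(A & 8) == 0" becomes "(((unsigned) A >> 3) ^ 1) & 1" (a sketch;
   the shift is omitted when the tested bit is bit 0, and the choice
   of signedness is made below).  */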
6654
6655 /* If INNER is a right shift of a constant and it plus BITNUM does
6656 not overflow, adjust BITNUM and INNER. */
6657 if (TREE_CODE (inner) == RSHIFT_EXPR
6658 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6659 && bitnum < TYPE_PRECISION (type)
6660 && wi::ltu_p (TREE_OPERAND (inner, 1),
6661 TYPE_PRECISION (type) - bitnum))
6662 {
6663 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6664 inner = TREE_OPERAND (inner, 0);
6665 }
6666
6667 /* If we are going to be able to omit the AND below, we must do our
6668 operations as unsigned. If we must use the AND, we have a choice.
6669 Normally unsigned is faster, but for some machines signed is. */
6670 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6671 && !flag_syntax_only) ? 0 : 1;
6672
6673 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6674 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6675 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6676 inner = fold_convert_loc (loc, intermediate_type, inner);
6677
6678 if (bitnum != 0)
6679 inner = build2 (RSHIFT_EXPR, intermediate_type,
6680 inner, size_int (bitnum));
6681
6682 one = build_int_cst (intermediate_type, 1);
6683
6684 if (code == EQ_EXPR)
6685 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6686
6687 /* Put the AND last so it can combine with more things. */
6688 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6689
6690 /* Make sure to return the proper type. */
6691 inner = fold_convert_loc (loc, result_type, inner);
6692
6693 return inner;
6694 }
6695 return NULL_TREE;
6696 }
6697
6698 /* Check whether we are allowed to reorder operands arg0 and arg1,
6699 such that the evaluation of arg1 occurs before arg0. */
6700
6701 static bool
6702 reorder_operands_p (const_tree arg0, const_tree arg1)
6703 {
6704 if (! flag_evaluation_order)
6705 return true;
6706 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6707 return true;
6708 return ! TREE_SIDE_EFFECTS (arg0)
6709 && ! TREE_SIDE_EFFECTS (arg1);
6710 }
6711
6712 /* Test whether it is preferable to swap two operands, ARG0 and
6713 ARG1, for example because ARG0 is an integer constant and ARG1
6714 isn't. If REORDER is true, only recommend swapping if we can
6715 evaluate the operands in reverse order. */
6716
6717 bool
6718 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6719 {
6720 if (CONSTANT_CLASS_P (arg1))
6721 return 0;
6722 if (CONSTANT_CLASS_P (arg0))
6723 return 1;
6724
6725 STRIP_NOPS (arg0);
6726 STRIP_NOPS (arg1);
6727
6728 if (TREE_CONSTANT (arg1))
6729 return 0;
6730 if (TREE_CONSTANT (arg0))
6731 return 1;
6732
6733 if (reorder && flag_evaluation_order
6734 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6735 return 0;
6736
6737 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6738 for commutative and comparison operators. Ensuring a canonical
6739 form allows the optimizers to find additional redundancies without
6740 having to explicitly check for both orderings. */
6741 if (TREE_CODE (arg0) == SSA_NAME
6742 && TREE_CODE (arg1) == SSA_NAME
6743 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6744 return 1;
6745
6746 /* Put SSA_NAMEs last. */
6747 if (TREE_CODE (arg1) == SSA_NAME)
6748 return 0;
6749 if (TREE_CODE (arg0) == SSA_NAME)
6750 return 1;
6751
6752 /* Put variables last. */
6753 if (DECL_P (arg1))
6754 return 0;
6755 if (DECL_P (arg0))
6756 return 1;
6757
6758 return 0;
6759 }
6760
6761
6762 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6763 means A >= Y && A != MAX, but in this case we know that
6764 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6765
6766 static tree
6767 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6768 {
6769 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6770
6771 if (TREE_CODE (bound) == LT_EXPR)
6772 a = TREE_OPERAND (bound, 0);
6773 else if (TREE_CODE (bound) == GT_EXPR)
6774 a = TREE_OPERAND (bound, 1);
6775 else
6776 return NULL_TREE;
6777
6778 typea = TREE_TYPE (a);
6779 if (!INTEGRAL_TYPE_P (typea)
6780 && !POINTER_TYPE_P (typea))
6781 return NULL_TREE;
6782
6783 if (TREE_CODE (ineq) == LT_EXPR)
6784 {
6785 a1 = TREE_OPERAND (ineq, 1);
6786 y = TREE_OPERAND (ineq, 0);
6787 }
6788 else if (TREE_CODE (ineq) == GT_EXPR)
6789 {
6790 a1 = TREE_OPERAND (ineq, 0);
6791 y = TREE_OPERAND (ineq, 1);
6792 }
6793 else
6794 return NULL_TREE;
6795
6796 if (TREE_TYPE (a1) != typea)
6797 return NULL_TREE;
6798
6799 if (POINTER_TYPE_P (typea))
6800 {
6801 /* Convert the pointer types into integers before taking the difference. */
6802 tree ta = fold_convert_loc (loc, ssizetype, a);
6803 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6804 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6805 }
6806 else
6807 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6808
6809 if (!diff || !integer_onep (diff))
6810 return NULL_TREE;
6811
6812 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6813 }
6814
6815 /* Fold a sum or difference of at least one multiplication.
6816 Returns the folded tree or NULL if no simplification could be made. */
6817
6818 static tree
6819 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6820 tree arg0, tree arg1)
6821 {
6822 tree arg00, arg01, arg10, arg11;
6823 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6824
6825 /* (A * C) +- (B * C) -> (A+-B) * C.
6826 (A * C) +- A -> A * (C+-1).
6827 We are most concerned about the case where C is a constant,
6828 but other combinations show up during loop reduction. Since
6829 it is not difficult, try all four possibilities. */
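/* E.g., x*3 + y*3 becomes (x + y) * 3, and x*3 - x becomes
   x * (3 - 1), i.e. x * 2.  */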
6830
6831 if (TREE_CODE (arg0) == MULT_EXPR)
6832 {
6833 arg00 = TREE_OPERAND (arg0, 0);
6834 arg01 = TREE_OPERAND (arg0, 1);
6835 }
6836 else if (TREE_CODE (arg0) == INTEGER_CST)
6837 {
6838 arg00 = build_one_cst (type);
6839 arg01 = arg0;
6840 }
6841 else
6842 {
6843 /* We cannot generate constant 1 for fract. */
6844 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6845 return NULL_TREE;
6846 arg00 = arg0;
6847 arg01 = build_one_cst (type);
6848 }
6849 if (TREE_CODE (arg1) == MULT_EXPR)
6850 {
6851 arg10 = TREE_OPERAND (arg1, 0);
6852 arg11 = TREE_OPERAND (arg1, 1);
6853 }
6854 else if (TREE_CODE (arg1) == INTEGER_CST)
6855 {
6856 arg10 = build_one_cst (type);
6857 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6858 the purpose of this canonicalization. */
6859 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6860 && negate_expr_p (arg1)
6861 && code == PLUS_EXPR)
6862 {
6863 arg11 = negate_expr (arg1);
6864 code = MINUS_EXPR;
6865 }
6866 else
6867 arg11 = arg1;
6868 }
6869 else
6870 {
6871 /* We cannot generate constant 1 for fract. */
6872 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6873 return NULL_TREE;
6874 arg10 = arg1;
6875 arg11 = build_one_cst (type);
6876 }
6877 same = NULL_TREE;
6878
6879 if (operand_equal_p (arg01, arg11, 0))
6880 same = arg01, alt0 = arg00, alt1 = arg10;
6881 else if (operand_equal_p (arg00, arg10, 0))
6882 same = arg00, alt0 = arg01, alt1 = arg11;
6883 else if (operand_equal_p (arg00, arg11, 0))
6884 same = arg00, alt0 = arg01, alt1 = arg10;
6885 else if (operand_equal_p (arg01, arg10, 0))
6886 same = arg01, alt0 = arg00, alt1 = arg11;
6887
6888 /* No identical multiplicands; see if we can find a common
6889 power-of-two factor in non-power-of-two multiplies. This
6890 can help in multi-dimensional array access. */
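/* E.g., i*12 + j*4 can be rewritten as (i*3 + j) * 4, the kind of
   address arithmetic a two-dimensional array access tends to produce.  */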
6891 else if (tree_fits_shwi_p (arg01)
6892 && tree_fits_shwi_p (arg11))
6893 {
6894 HOST_WIDE_INT int01, int11, tmp;
6895 bool swap = false;
6896 tree maybe_same;
6897 int01 = tree_to_shwi (arg01);
6898 int11 = tree_to_shwi (arg11);
6899
6900 /* Move min of absolute values to int11. */
6901 if (absu_hwi (int01) < absu_hwi (int11))
6902 {
6903 tmp = int01, int01 = int11, int11 = tmp;
6904 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6905 maybe_same = arg01;
6906 swap = true;
6907 }
6908 else
6909 maybe_same = arg11;
6910
6911 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6912 /* The remainder should not be a constant, otherwise we
6913 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6914 increase the number of multiplications necessary. */
6915 && TREE_CODE (arg10) != INTEGER_CST)
6916 {
6917 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6918 build_int_cst (TREE_TYPE (arg00),
6919 int01 / int11));
6920 alt1 = arg10;
6921 same = maybe_same;
6922 if (swap)
6923 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6924 }
6925 }
6926
6927 if (same)
6928 return fold_build2_loc (loc, MULT_EXPR, type,
6929 fold_build2_loc (loc, code, type,
6930 fold_convert_loc (loc, type, alt0),
6931 fold_convert_loc (loc, type, alt1)),
6932 fold_convert_loc (loc, type, same));
6933
6934 return NULL_TREE;
6935 }
6936
6937 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6938 specified by EXPR into the buffer PTR of length LEN bytes.
6939 Return the number of bytes placed in the buffer, or zero
6940 upon failure. */
6941
6942 static int
6943 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6944 {
6945 tree type = TREE_TYPE (expr);
6946 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6947 int byte, offset, word, words;
6948 unsigned char value;
6949
6950 if ((off == -1 && total_bytes > len)
6951 || off >= total_bytes)
6952 return 0;
6953 if (off == -1)
6954 off = 0;
6955 words = total_bytes / UNITS_PER_WORD;
6956
6957 for (byte = 0; byte < total_bytes; byte++)
6958 {
6959 int bitpos = byte * BITS_PER_UNIT;
6960 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
6961 number of bytes. */
6962 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
6963
6964 if (total_bytes > UNITS_PER_WORD)
6965 {
6966 word = byte / UNITS_PER_WORD;
6967 if (WORDS_BIG_ENDIAN)
6968 word = (words - 1) - word;
6969 offset = word * UNITS_PER_WORD;
6970 if (BYTES_BIG_ENDIAN)
6971 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6972 else
6973 offset += byte % UNITS_PER_WORD;
6974 }
6975 else
6976 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6977 if (offset >= off
6978 && offset - off < len)
6979 ptr[offset - off] = value;
6980 }
6981 return MIN (len, total_bytes - off);
6982 }
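/* For illustration (hypothetical 4-byte int, BITS_PER_UNIT == 8):
   encoding the INTEGER_CST 0x11223344 with OFF == -1 stores the bytes
   44 33 22 11 on a little-endian target and 11 22 33 44 on a
   big-endian one, returning 4.  With OFF == 2 and LEN == 2, only the
   bytes at offsets 2 and 3 of the full encoding are stored and 2 is
   returned.  */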
6983
6984
6985 /* Subroutine of native_encode_expr. Encode the FIXED_CST
6986 specified by EXPR into the buffer PTR of length LEN bytes.
6987 Return the number of bytes placed in the buffer, or zero
6988 upon failure. */
6989
6990 static int
6991 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
6992 {
6993 tree type = TREE_TYPE (expr);
6994 machine_mode mode = TYPE_MODE (type);
6995 int total_bytes = GET_MODE_SIZE (mode);
6996 FIXED_VALUE_TYPE value;
6997 tree i_value, i_type;
6998
6999 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7000 return 0;
7001
7002 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7003
7004 if (NULL_TREE == i_type
7006 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7006 return 0;
7007
7008 value = TREE_FIXED_CST (expr);
7009 i_value = double_int_to_tree (i_type, value.data);
7010
7011 return native_encode_int (i_value, ptr, len, off);
7012 }
7013
7014
7015 /* Subroutine of native_encode_expr. Encode the REAL_CST
7016 specified by EXPR into the buffer PTR of length LEN bytes.
7017 Return the number of bytes placed in the buffer, or zero
7018 upon failure. */
7019
7020 static int
7021 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7022 {
7023 tree type = TREE_TYPE (expr);
7024 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7025 int byte, offset, word, words, bitpos;
7026 unsigned char value;
7027
7028 /* There are always 32 bits in each long, no matter the size of
7029 the host's long.  We handle floating point representations with
7030 up to 192 bits. */
7031 long tmp[6];
7032
7033 if ((off == -1 && total_bytes > len)
7034 || off >= total_bytes)
7035 return 0;
7036 if (off == -1)
7037 off = 0;
7038 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7039
7040 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7041
7042 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7043 bitpos += BITS_PER_UNIT)
7044 {
7045 byte = (bitpos / BITS_PER_UNIT) & 3;
7046 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7047
7048 if (UNITS_PER_WORD < 4)
7049 {
7050 word = byte / UNITS_PER_WORD;
7051 if (WORDS_BIG_ENDIAN)
7052 word = (words - 1) - word;
7053 offset = word * UNITS_PER_WORD;
7054 if (BYTES_BIG_ENDIAN)
7055 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7056 else
7057 offset += byte % UNITS_PER_WORD;
7058 }
7059 else
7060 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7061 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7062 if (offset >= off
7063 && offset - off < len)
7064 ptr[offset - off] = value;
7065 }
7066 return MIN (len, total_bytes - off);
7067 }
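/* For illustration (assuming IEEE single precision for SFmode): the
   REAL_CST 1.0f has the image 0x3f800000, so it encodes as the bytes
   00 00 80 3f on a little-endian target, reversed on big-endian.  */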
7068
7069 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7070 specified by EXPR into the buffer PTR of length LEN bytes.
7071 Return the number of bytes placed in the buffer, or zero
7072 upon failure. */
7073
7074 static int
7075 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7076 {
7077 int rsize, isize;
7078 tree part;
7079
7080 part = TREE_REALPART (expr);
7081 rsize = native_encode_expr (part, ptr, len, off);
7082 if (off == -1
7083 && rsize == 0)
7084 return 0;
7085 part = TREE_IMAGPART (expr);
7086 if (off != -1)
7087 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7088 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7089 if (off == -1
7090 && isize != rsize)
7091 return 0;
7092 return rsize + isize;
7093 }
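/* For illustration: a complex float constant 3.0f + 4.0fi encodes as
   the four bytes of 3.0f followed by the four bytes of 4.0f; note how
   OFF is shifted down by the size of the real part before encoding
   the imaginary part so that partial reads stay aligned.  */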
7094
7095
7096 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7097 specified by EXPR into the buffer PTR of length LEN bytes.
7098 Return the number of bytes placed in the buffer, or zero
7099 upon failure. */
7100
7101 static int
7102 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7103 {
7104 unsigned i, count;
7105 int size, offset;
7106 tree itype, elem;
7107
7108 offset = 0;
7109 count = VECTOR_CST_NELTS (expr);
7110 itype = TREE_TYPE (TREE_TYPE (expr));
7111 size = GET_MODE_SIZE (TYPE_MODE (itype));
7112 for (i = 0; i < count; i++)
7113 {
7114 if (off >= size)
7115 {
7116 off -= size;
7117 continue;
7118 }
7119 elem = VECTOR_CST_ELT (expr, i);
7120 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7121 if ((off == -1 && res != size)
7122 || res == 0)
7123 return 0;
7124 offset += res;
7125 if (offset >= len)
7126 return offset;
7127 if (off != -1)
7128 off = 0;
7129 }
7130 return offset;
7131 }
7132
7133
7134 /* Subroutine of native_encode_expr. Encode the STRING_CST
7135 specified by EXPR into the buffer PTR of length LEN bytes.
7136 Return the number of bytes placed in the buffer, or zero
7137 upon failure. */
7138
7139 static int
7140 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7141 {
7142 tree type = TREE_TYPE (expr);
7143 HOST_WIDE_INT total_bytes;
7144
7145 if (TREE_CODE (type) != ARRAY_TYPE
7146 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7147 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7148 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7149 return 0;
7150 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7151 if ((off == -1 && total_bytes > len)
7152 || off >= total_bytes)
7153 return 0;
7154 if (off == -1)
7155 off = 0;
7156 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7157 {
7158 int written = 0;
7159 if (off < TREE_STRING_LENGTH (expr))
7160 {
7161 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7162 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7163 }
7164 memset (ptr + written, 0,
7165 MIN (total_bytes - written, len - written));
7166 }
7167 else
7168 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7169 return MIN (total_bytes - off, len);
7170 }
7171
7172
7173 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7174 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7175 buffer PTR of length LEN bytes. If OFF is not -1 then start
7176 the encoding at byte offset OFF and encode at most LEN bytes.
7177 Return the number of bytes placed in the buffer, or zero upon failure. */
7178
7179 int
7180 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7181 {
7182 /* We don't support starting at a negative offset, and -1 is special.  */
7183 if (off < -1)
7184 return 0;
7185
7186 switch (TREE_CODE (expr))
7187 {
7188 case INTEGER_CST:
7189 return native_encode_int (expr, ptr, len, off);
7190
7191 case REAL_CST:
7192 return native_encode_real (expr, ptr, len, off);
7193
7194 case FIXED_CST:
7195 return native_encode_fixed (expr, ptr, len, off);
7196
7197 case COMPLEX_CST:
7198 return native_encode_complex (expr, ptr, len, off);
7199
7200 case VECTOR_CST:
7201 return native_encode_vector (expr, ptr, len, off);
7202
7203 case STRING_CST:
7204 return native_encode_string (expr, ptr, len, off);
7205
7206 default:
7207 return 0;
7208 }
7209 }
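#if 0
/* Usage sketch (hypothetical, never compiled): round-trip a constant
   through its target byte image, reinterpreting an int as a float.
   Assumes an IEEE target where int and float are both four bytes.  */
static tree
example_bits_to_float (void)
{
  unsigned char buf[16];
  tree ival = build_int_cst (integer_type_node, 0x3f800000);
  /* Encode the full constant (OFF == -1 means "from the start").  */
  int n = native_encode_expr (ival, buf, sizeof (buf), -1);
  if (n == 0)
    return NULL_TREE;
  /* Reinterpret the same bytes as a float: 1.0f on such a target.  */
  return native_interpret_expr (float_type_node, buf, n);
}
#endif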
7210
7211
7212 /* Subroutine of native_interpret_expr. Interpret the contents of
7213 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7214 If the buffer cannot be interpreted, return NULL_TREE. */
7215
7216 static tree
7217 native_interpret_int (tree type, const unsigned char *ptr, int len)
7218 {
7219 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7220
7221 if (total_bytes > len
7222 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7223 return NULL_TREE;
7224
7225 wide_int result = wi::from_buffer (ptr, total_bytes);
7226
7227 return wide_int_to_tree (type, result);
7228 }
7229
7230
7231 /* Subroutine of native_interpret_expr. Interpret the contents of
7232 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7233 If the buffer cannot be interpreted, return NULL_TREE. */
7234
7235 static tree
7236 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7237 {
7238 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7239 double_int result;
7240 FIXED_VALUE_TYPE fixed_value;
7241
7242 if (total_bytes > len
7243 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7244 return NULL_TREE;
7245
7246 result = double_int::from_buffer (ptr, total_bytes);
7247 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7248
7249 return build_fixed (type, fixed_value);
7250 }
7251
7252
7253 /* Subroutine of native_interpret_expr. Interpret the contents of
7254 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7255 If the buffer cannot be interpreted, return NULL_TREE. */
7256
7257 static tree
7258 native_interpret_real (tree type, const unsigned char *ptr, int len)
7259 {
7260 machine_mode mode = TYPE_MODE (type);
7261 int total_bytes = GET_MODE_SIZE (mode);
7262 unsigned char value;
7263 /* There are always 32 bits in each long, no matter the size of
7264 the host's long.  We handle floating point representations with
7265 up to 192 bits. */
7266 REAL_VALUE_TYPE r;
7267 long tmp[6];
7268
7270 if (total_bytes > len || total_bytes > 24)
7271 return NULL_TREE;
7272 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7273
7274 memset (tmp, 0, sizeof (tmp));
7275 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7276 bitpos += BITS_PER_UNIT)
7277 {
7278 /* Both OFFSET and BYTE index within a long;
7279 bitpos indexes the whole float. */
7280 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7281 if (UNITS_PER_WORD < 4)
7282 {
7283 int word = byte / UNITS_PER_WORD;
7284 if (WORDS_BIG_ENDIAN)
7285 word = (words - 1) - word;
7286 offset = word * UNITS_PER_WORD;
7287 if (BYTES_BIG_ENDIAN)
7288 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7289 else
7290 offset += byte % UNITS_PER_WORD;
7291 }
7292 else
7293 {
7294 offset = byte;
7295 if (BYTES_BIG_ENDIAN)
7296 {
7297 /* Reverse bytes within each long, or within the entire float
7298 if it's smaller than a long (for HFmode). */
7299 offset = MIN (3, total_bytes - 1) - offset;
7300 gcc_assert (offset >= 0);
7301 }
7302 }
7303 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7304
7305 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7306 }
7307
7308 real_from_target (&r, tmp, mode);
7309 return build_real (type, r);
7310 }
7311
7312
7313 /* Subroutine of native_interpret_expr. Interpret the contents of
7314 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7315 If the buffer cannot be interpreted, return NULL_TREE. */
7316
7317 static tree
7318 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7319 {
7320 tree etype, rpart, ipart;
7321 int size;
7322
7323 etype = TREE_TYPE (type);
7324 size = GET_MODE_SIZE (TYPE_MODE (etype));
7325 if (size * 2 > len)
7326 return NULL_TREE;
7327 rpart = native_interpret_expr (etype, ptr, size);
7328 if (!rpart)
7329 return NULL_TREE;
7330 ipart = native_interpret_expr (etype, ptr+size, size);
7331 if (!ipart)
7332 return NULL_TREE;
7333 return build_complex (type, rpart, ipart);
7334 }
7335
7336
7337 /* Subroutine of native_interpret_expr. Interpret the contents of
7338 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7339 If the buffer cannot be interpreted, return NULL_TREE. */
7340
7341 static tree
7342 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7343 {
7344 tree etype, elem;
7345 int i, size, count;
7346 tree *elements;
7347
7348 etype = TREE_TYPE (type);
7349 size = GET_MODE_SIZE (TYPE_MODE (etype));
7350 count = TYPE_VECTOR_SUBPARTS (type);
7351 if (size * count > len)
7352 return NULL_TREE;
7353
7354 elements = XALLOCAVEC (tree, count);
7355 for (i = count - 1; i >= 0; i--)
7356 {
7357 elem = native_interpret_expr (etype, ptr+(i*size), size);
7358 if (!elem)
7359 return NULL_TREE;
7360 elements[i] = elem;
7361 }
7362 return build_vector (type, elements);
7363 }
7364
7365
7366 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7367 the buffer PTR of length LEN as a constant of type TYPE. For
7368 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7369 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7370 return NULL_TREE. */
7371
7372 tree
7373 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7374 {
7375 switch (TREE_CODE (type))
7376 {
7377 case INTEGER_TYPE:
7378 case ENUMERAL_TYPE:
7379 case BOOLEAN_TYPE:
7380 case POINTER_TYPE:
7381 case REFERENCE_TYPE:
7382 return native_interpret_int (type, ptr, len);
7383
7384 case REAL_TYPE:
7385 return native_interpret_real (type, ptr, len);
7386
7387 case FIXED_POINT_TYPE:
7388 return native_interpret_fixed (type, ptr, len);
7389
7390 case COMPLEX_TYPE:
7391 return native_interpret_complex (type, ptr, len);
7392
7393 case VECTOR_TYPE:
7394 return native_interpret_vector (type, ptr, len);
7395
7396 default:
7397 return NULL_TREE;
7398 }
7399 }
7400
7401 /* Returns true if we can interpret the contents of a native encoding
7402 as TYPE. */
7403
7404 static bool
7405 can_native_interpret_type_p (tree type)
7406 {
7407 switch (TREE_CODE (type))
7408 {
7409 case INTEGER_TYPE:
7410 case ENUMERAL_TYPE:
7411 case BOOLEAN_TYPE:
7412 case POINTER_TYPE:
7413 case REFERENCE_TYPE:
7414 case FIXED_POINT_TYPE:
7415 case REAL_TYPE:
7416 case COMPLEX_TYPE:
7417 case VECTOR_TYPE:
7418 return true;
7419 default:
7420 return false;
7421 }
7422 }
7423
7424 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7425 TYPE at compile-time. If we're unable to perform the conversion
7426 return NULL_TREE. */
7427
7428 static tree
7429 fold_view_convert_expr (tree type, tree expr)
7430 {
7431 /* We support up to 512-bit values (for V8DFmode). */
7432 unsigned char buffer[64];
7433 int len;
7434
7435 /* Check that the host and target are sane. */
7436 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7437 return NULL_TREE;
7438
7439 len = native_encode_expr (expr, buffer, sizeof (buffer));
7440 if (len == 0)
7441 return NULL_TREE;
7442
7443 return native_interpret_expr (type, buffer, len);
7444 }
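/* For illustration: on an IEEE target the above lets
   VIEW_CONVERT_EXPR<int>(1.0f) fold to the INTEGER_CST 0x3f800000 at
   compile time, since the value is bounced through its target byte
   image.  */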
7445
7446 /* Build an expression for the address of T. Folds away INDIRECT_REF
7447 to avoid confusing the gimplify process. */
7448
7449 tree
7450 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7451 {
7452 /* The size of the object is not relevant when talking about its address. */
7453 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7454 t = TREE_OPERAND (t, 0);
7455
7456 if (TREE_CODE (t) == INDIRECT_REF)
7457 {
7458 t = TREE_OPERAND (t, 0);
7459
7460 if (TREE_TYPE (t) != ptrtype)
7461 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7462 }
7463 else if (TREE_CODE (t) == MEM_REF
7464 && integer_zerop (TREE_OPERAND (t, 1)))
7465 return TREE_OPERAND (t, 0);
7466 else if (TREE_CODE (t) == MEM_REF
7467 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7468 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7469 TREE_OPERAND (t, 0),
7470 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7471 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7472 {
7473 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7474
7475 if (TREE_TYPE (t) != ptrtype)
7476 t = fold_convert_loc (loc, ptrtype, t);
7477 }
7478 else
7479 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7480
7481 return t;
7482 }
7483
7484 /* Build an expression for the address of T. */
7485
7486 tree
7487 build_fold_addr_expr_loc (location_t loc, tree t)
7488 {
7489 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7490
7491 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7492 }
7493
7494 /* Fold a unary expression of code CODE and type TYPE with operand
7495 OP0. Return the folded expression if folding is successful.
7496 Otherwise, return NULL_TREE. */
7497
7498 tree
7499 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7500 {
7501 tree tem;
7502 tree arg0;
7503 enum tree_code_class kind = TREE_CODE_CLASS (code);
7504
7505 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7506 && TREE_CODE_LENGTH (code) == 1);
7507
7508 arg0 = op0;
7509 if (arg0)
7510 {
7511 if (CONVERT_EXPR_CODE_P (code)
7512 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7513 {
7514 /* Don't use STRIP_NOPS, because signedness of argument type
7515 matters. */
7516 STRIP_SIGN_NOPS (arg0);
7517 }
7518 else
7519 {
7520 /* Strip any conversions that don't change the mode. This
7521 is safe for every expression, except for a comparison
7522 expression because its signedness is derived from its
7523 operands.
7524
7525 Note that this is done as an internal manipulation within
7526 the constant folder, in order to find the simplest
7527 representation of the arguments so that their form can be
7528 studied.  In any case, the appropriate type conversions
7529 should be put back in the tree that will get out of the
7530 constant folder. */
7531 STRIP_NOPS (arg0);
7532 }
7533
7534 if (CONSTANT_CLASS_P (arg0))
7535 {
7536 tree tem = const_unop (code, type, arg0);
7537 if (tem)
7538 {
7539 if (TREE_TYPE (tem) != type)
7540 tem = fold_convert_loc (loc, type, tem);
7541 return tem;
7542 }
7543 }
7544 }
7545
7546 tem = generic_simplify (loc, code, type, op0);
7547 if (tem)
7548 return tem;
7549
7550 if (TREE_CODE_CLASS (code) == tcc_unary)
7551 {
7552 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7553 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7554 fold_build1_loc (loc, code, type,
7555 fold_convert_loc (loc, TREE_TYPE (op0),
7556 TREE_OPERAND (arg0, 1))));
7557 else if (TREE_CODE (arg0) == COND_EXPR)
7558 {
7559 tree arg01 = TREE_OPERAND (arg0, 1);
7560 tree arg02 = TREE_OPERAND (arg0, 2);
7561 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7562 arg01 = fold_build1_loc (loc, code, type,
7563 fold_convert_loc (loc,
7564 TREE_TYPE (op0), arg01));
7565 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7566 arg02 = fold_build1_loc (loc, code, type,
7567 fold_convert_loc (loc,
7568 TREE_TYPE (op0), arg02));
7569 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7570 arg01, arg02);
7571
7572 /* If this was a conversion, and all we did was to move it
7573 inside the COND_EXPR, bring it back out.  But leave it if
7574 it is a conversion from integer to integer and the
7575 result precision is no wider than a word since such a
7576 conversion is cheap and may be optimized away by combine,
7577 while it couldn't if it were outside the COND_EXPR. Then return
7578 so we don't get into an infinite recursion loop taking the
7579 conversion out and then back in. */
7580
7581 if ((CONVERT_EXPR_CODE_P (code)
7582 || code == NON_LVALUE_EXPR)
7583 && TREE_CODE (tem) == COND_EXPR
7584 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7585 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7586 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7587 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7588 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7589 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7590 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7591 && (INTEGRAL_TYPE_P
7592 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7593 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7594 || flag_syntax_only))
7595 tem = build1_loc (loc, code, type,
7596 build3 (COND_EXPR,
7597 TREE_TYPE (TREE_OPERAND
7598 (TREE_OPERAND (tem, 1), 0)),
7599 TREE_OPERAND (tem, 0),
7600 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7601 TREE_OPERAND (TREE_OPERAND (tem, 2),
7602 0)));
7603 return tem;
7604 }
7605 }
7606
7607 switch (code)
7608 {
7609 case NON_LVALUE_EXPR:
7610 if (!maybe_lvalue_p (op0))
7611 return fold_convert_loc (loc, type, op0);
7612 return NULL_TREE;
7613
7614 CASE_CONVERT:
7615 case FLOAT_EXPR:
7616 case FIX_TRUNC_EXPR:
7617 if (COMPARISON_CLASS_P (op0))
7618 {
7619 /* If we have (type) (a CMP b) and type is an integral type, return
7620 new expression involving the new type. Canonicalize
7621 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7622 non-integral type.
7623 Do not fold the result, as that would not simplify further;
7624 folding it again would only recurse.  */
7625 if (TREE_CODE (type) == BOOLEAN_TYPE)
7626 return build2_loc (loc, TREE_CODE (op0), type,
7627 TREE_OPERAND (op0, 0),
7628 TREE_OPERAND (op0, 1));
7629 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7630 && TREE_CODE (type) != VECTOR_TYPE)
7631 return build3_loc (loc, COND_EXPR, type, op0,
7632 constant_boolean_node (true, type),
7633 constant_boolean_node (false, type));
7634 }
7635
7636 /* Handle (T *)&A.B.C for A being of type T and B and C
7637 living at offset zero. This occurs frequently in
7638 C++ upcasting and then accessing the base. */
7639 if (TREE_CODE (op0) == ADDR_EXPR
7640 && POINTER_TYPE_P (type)
7641 && handled_component_p (TREE_OPERAND (op0, 0)))
7642 {
7643 HOST_WIDE_INT bitsize, bitpos;
7644 tree offset;
7645 machine_mode mode;
7646 int unsignedp, volatilep;
7647 tree base = TREE_OPERAND (op0, 0);
7648 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7649 &mode, &unsignedp, &volatilep, false);
7650 /* If the reference was to a (constant) zero offset, we can use
7651 the address of the base if it has the same base type
7652 as the result type and the pointer type is unqualified. */
7653 if (! offset && bitpos == 0
7654 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7655 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7656 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7657 return fold_convert_loc (loc, type,
7658 build_fold_addr_expr_loc (loc, base));
7659 }
7660
7661 if (TREE_CODE (op0) == MODIFY_EXPR
7662 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7663 /* Detect assigning a bitfield. */
7664 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7665 && DECL_BIT_FIELD
7666 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7667 {
7668 /* Don't leave an assignment inside a conversion
7669 unless assigning a bitfield. */
7670 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7671 /* First do the assignment, then return converted constant. */
7672 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7673 TREE_NO_WARNING (tem) = 1;
7674 TREE_USED (tem) = 1;
7675 return tem;
7676 }
7677
7678 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7679 constants (if x has signed type, the sign bit cannot be set
7680 in c). This folds extension into the BIT_AND_EXPR.
7681 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7682 very likely don't have maximal range for their precision and this
7683 transformation effectively doesn't preserve non-maximal ranges. */
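      /* For illustration: with int x, (long)(x & 0x7f) becomes
	 (long)x & 0x7f -- the mask clears the sign bit, so extending
	 before or after the AND yields the same value.  */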
7684 if (TREE_CODE (type) == INTEGER_TYPE
7685 && TREE_CODE (op0) == BIT_AND_EXPR
7686 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7687 {
7688 tree and_expr = op0;
7689 tree and0 = TREE_OPERAND (and_expr, 0);
7690 tree and1 = TREE_OPERAND (and_expr, 1);
7691 int change = 0;
7692
7693 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7694 || (TYPE_PRECISION (type)
7695 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7696 change = 1;
7697 else if (TYPE_PRECISION (TREE_TYPE (and1))
7698 <= HOST_BITS_PER_WIDE_INT
7699 && tree_fits_uhwi_p (and1))
7700 {
7701 unsigned HOST_WIDE_INT cst;
7702
7703 cst = tree_to_uhwi (and1);
7704 cst &= HOST_WIDE_INT_M1U
7705 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7706 change = (cst == 0);
7707 if (change
7708 && !flag_syntax_only
7709 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7710 == ZERO_EXTEND))
7711 {
7712 tree uns = unsigned_type_for (TREE_TYPE (and0));
7713 and0 = fold_convert_loc (loc, uns, and0);
7714 and1 = fold_convert_loc (loc, uns, and1);
7715 }
7716 }
7717 if (change)
7718 {
7719 tem = force_fit_type (type, wi::to_widest (and1), 0,
7720 TREE_OVERFLOW (and1));
7721 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7722 fold_convert_loc (loc, type, and0), tem);
7723 }
7724 }
7725
7726 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7727 when one of the new casts will fold away. Conservatively we assume
7728 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7729 if (POINTER_TYPE_P (type)
7730 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7731 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7732 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7733 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7734 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7735 {
7736 tree arg00 = TREE_OPERAND (arg0, 0);
7737 tree arg01 = TREE_OPERAND (arg0, 1);
7738
7739 return fold_build_pointer_plus_loc
7740 (loc, fold_convert_loc (loc, type, arg00), arg01);
7741 }
7742
7743 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7744 of the same precision, and X has an integer type not narrower than
7745 T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7746 if (INTEGRAL_TYPE_P (type)
7747 && TREE_CODE (op0) == BIT_NOT_EXPR
7748 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7749 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7750 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7751 {
7752 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7753 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7754 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7755 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7756 fold_convert_loc (loc, type, tem));
7757 }
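      /* For illustration (assuming 64-bit long): with long x,
	 (int)~(unsigned int)x becomes ~(int)x -- T1 and T2 are both
	 32 bits wide and the inner cast only truncates, so the
	 complement commutes with the conversions.  */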
7758
7759 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7760 type of X and Y (integer types only). */
7761 if (INTEGRAL_TYPE_P (type)
7762 && TREE_CODE (op0) == MULT_EXPR
7763 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7764 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7765 {
7766 /* Be careful not to introduce new overflows. */
7767 tree mult_type;
7768 if (TYPE_OVERFLOW_WRAPS (type))
7769 mult_type = type;
7770 else
7771 mult_type = unsigned_type_for (type);
7772
7773 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7774 {
7775 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7776 fold_convert_loc (loc, mult_type,
7777 TREE_OPERAND (op0, 0)),
7778 fold_convert_loc (loc, mult_type,
7779 TREE_OPERAND (op0, 1)));
7780 return fold_convert_loc (loc, type, tem);
7781 }
7782 }
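      /* For illustration: with int x and y, (unsigned char)(x * y)
	 becomes (unsigned char)x * (unsigned char)y computed in the
	 8-bit type -- both forms agree modulo 256, and because the
	 narrow unsigned type wraps, no fresh signed overflow is
	 introduced.  */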
7783
7784 return NULL_TREE;
7785
7786 case VIEW_CONVERT_EXPR:
7787 if (TREE_CODE (op0) == MEM_REF)
7788 return fold_build2_loc (loc, MEM_REF, type,
7789 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7790
7791 return NULL_TREE;
7792
7793 case NEGATE_EXPR:
7794 tem = fold_negate_expr (loc, arg0);
7795 if (tem)
7796 return fold_convert_loc (loc, type, tem);
7797 return NULL_TREE;
7798
7799 case ABS_EXPR:
7800 /* Convert fabs((double)float) into (double)fabsf(float). */
7801 if (TREE_CODE (arg0) == NOP_EXPR
7802 && TREE_CODE (type) == REAL_TYPE)
7803 {
7804 tree targ0 = strip_float_extensions (arg0);
7805 if (targ0 != arg0)
7806 return fold_convert_loc (loc, type,
7807 fold_build1_loc (loc, ABS_EXPR,
7808 TREE_TYPE (targ0),
7809 targ0));
7810 }
7811 return NULL_TREE;
7812
7813 case BIT_NOT_EXPR:
7814 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7815 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7816 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7817 fold_convert_loc (loc, type,
7818 TREE_OPERAND (arg0, 0)))))
7819 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7820 fold_convert_loc (loc, type,
7821 TREE_OPERAND (arg0, 1)));
7822 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7823 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7824 fold_convert_loc (loc, type,
7825 TREE_OPERAND (arg0, 1)))))
7826 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7827 fold_convert_loc (loc, type,
7828 TREE_OPERAND (arg0, 0)), tem);
7829
7830 return NULL_TREE;
7831
7832 case TRUTH_NOT_EXPR:
7833 /* Note that the operand of this must be an int
7834 and its values must be 0 or 1.
7835 ("true" is a fixed value perhaps depending on the language,
7836 but we don't handle values other than 1 correctly yet.) */
7837 tem = fold_truth_not_expr (loc, arg0);
7838 if (!tem)
7839 return NULL_TREE;
7840 return fold_convert_loc (loc, type, tem);
7841
7842 case INDIRECT_REF:
7843 /* Fold *&X to X if X is an lvalue. */
7844 if (TREE_CODE (op0) == ADDR_EXPR)
7845 {
7846 tree op00 = TREE_OPERAND (op0, 0);
7847 if ((TREE_CODE (op00) == VAR_DECL
7848 || TREE_CODE (op00) == PARM_DECL
7849 || TREE_CODE (op00) == RESULT_DECL)
7850 && !TREE_READONLY (op00))
7851 return op00;
7852 }
7853 return NULL_TREE;
7854
7855 default:
7856 return NULL_TREE;
7857 } /* switch (code) */
7858 }
7859
7860
7861 /* If the operation was a conversion, do _not_ mark a resulting constant
7862 with TREE_OVERFLOW if the original constant was not. These conversions
7863 have implementation defined behavior and retaining the TREE_OVERFLOW
7864 flag here would confuse later passes such as VRP. */
7865 tree
7866 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7867 tree type, tree op0)
7868 {
7869 tree res = fold_unary_loc (loc, code, type, op0);
7870 if (res
7871 && TREE_CODE (res) == INTEGER_CST
7872 && TREE_CODE (op0) == INTEGER_CST
7873 && CONVERT_EXPR_CODE_P (code))
7874 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7875
7876 return res;
7877 }
7878
7879 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7880 operands OP0 and OP1. LOC is the location of the resulting expression.
7881 ARG0 and ARG1 are OP0 and OP1 with STRIP_NOPS applied.
7882 Return the folded expression if folding is successful. Otherwise,
7883 return NULL_TREE. */
7884 static tree
7885 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7886 tree arg0, tree arg1, tree op0, tree op1)
7887 {
7888 tree tem;
7889
7890 /* We only do these simplifications if we are optimizing. */
7891 if (!optimize)
7892 return NULL_TREE;
7893
7894 /* Check for things like (A || B) && (A || C). We can convert this
7895 to A || (B && C). Note that either operator can be any of the four
7896 truth and/or operations and the transformation will still be
7897 valid. Also note that we only care about order for the
7898 ANDIF and ORIF operators. If B contains side effects, this
7899 might change the truth-value of A. */
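  /* For illustration: (a || b) && (a || c) becomes a || (b && c)
     provided b has no side effects; if a is true, both forms skip b
     and c entirely, and otherwise both reduce to b && c.  */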
7900 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7901 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7902 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7903 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7904 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7905 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7906 {
7907 tree a00 = TREE_OPERAND (arg0, 0);
7908 tree a01 = TREE_OPERAND (arg0, 1);
7909 tree a10 = TREE_OPERAND (arg1, 0);
7910 tree a11 = TREE_OPERAND (arg1, 1);
7911 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7912 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7913 && (code == TRUTH_AND_EXPR
7914 || code == TRUTH_OR_EXPR));
7915
7916 if (operand_equal_p (a00, a10, 0))
7917 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7918 fold_build2_loc (loc, code, type, a01, a11));
7919 else if (commutative && operand_equal_p (a00, a11, 0))
7920 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7921 fold_build2_loc (loc, code, type, a01, a10));
7922 else if (commutative && operand_equal_p (a01, a10, 0))
7923 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7924 fold_build2_loc (loc, code, type, a00, a11));
7925
7926 /* This case is tricky because we must either have commutative
7927 operators or else A10 must not have side-effects. */
7928
7929 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7930 && operand_equal_p (a01, a11, 0))
7931 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7932 fold_build2_loc (loc, code, type, a00, a10),
7933 a01);
7934 }
7935
7936 /* See if we can build a range comparison. */
7937 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
7938 return tem;
7939
7940 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
7941 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
7942 {
7943 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
7944 if (tem)
7945 return fold_build2_loc (loc, code, type, tem, arg1);
7946 }
7947
7948 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
7949 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
7950 {
7951 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
7952 if (tem)
7953 return fold_build2_loc (loc, code, type, arg0, tem);
7954 }
7955
7956 /* Check for the possibility of merging component references. If our
7957 lhs is another similar operation, try to merge its rhs with our
7958 rhs. Then try to merge our lhs and rhs. */
7959 if (TREE_CODE (arg0) == code
7960 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
7961 TREE_OPERAND (arg0, 1), arg1)))
7962 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
7963
7964 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
7965 return tem;
7966
7967 if (LOGICAL_OP_NON_SHORT_CIRCUIT
7968 && (code == TRUTH_AND_EXPR
7969 || code == TRUTH_ANDIF_EXPR
7970 || code == TRUTH_OR_EXPR
7971 || code == TRUTH_ORIF_EXPR))
7972 {
7973 enum tree_code ncode, icode;
7974
7975 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
7976 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
7977 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
7978
7979 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
7980 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
7981 We don't want to pack more than two leaves into a non-IF AND/OR
7982 expression.
7983 If the tree code of the left-hand operand isn't an AND/OR-IF code
7984 and isn't equal to IF-CODE, then we don't want to add the
7985 right-hand operand.  If the inner right-hand side of the
7986 left-hand operand has side effects, or isn't simple, then we
7987 can't add to it, as otherwise we might destroy the if-sequence.  */
7988 if (TREE_CODE (arg0) == icode
7989 && simple_operand_p_2 (arg1)
7990 /* Needed for sequence points, to handle trapping and
7991 side effects.  */
7992 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
7993 {
7994 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
7995 arg1);
7996 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
7997 tem);
7998 }
7999 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8000 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8001 else if (TREE_CODE (arg1) == icode
8002 && simple_operand_p_2 (arg0)
8003 /* Needed for sequence points, to handle trapping and
8004 side effects.  */
8005 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8006 {
8007 tem = fold_build2_loc (loc, ncode, type,
8008 arg0, TREE_OPERAND (arg1, 0));
8009 return fold_build2_loc (loc, icode, type, tem,
8010 TREE_OPERAND (arg1, 1));
8011 }
8012 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8013 into (A OR B).
8014 For sequence point consistency, we need to check for trapping
8015 and side effects.  */
8016 else if (code == icode && simple_operand_p_2 (arg0)
8017 && simple_operand_p_2 (arg1))
8018 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8019 }
8020
8021 return NULL_TREE;
8022 }
8023
8024 /* Fold a binary expression of code CODE and type TYPE with operands
8025 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8026 Return the folded expression if folding is successful. Otherwise,
8027 return NULL_TREE. */
8028
8029 static tree
8030 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8031 {
8032 enum tree_code compl_code;
8033
8034 if (code == MIN_EXPR)
8035 compl_code = MAX_EXPR;
8036 else if (code == MAX_EXPR)
8037 compl_code = MIN_EXPR;
8038 else
8039 gcc_unreachable ();
8040
8041 /* MIN (MAX (a, b), b) == b. */
8042 if (TREE_CODE (op0) == compl_code
8043 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8044 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8045
8046 /* MIN (MAX (b, a), b) == b. */
8047 if (TREE_CODE (op0) == compl_code
8048 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8049 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8050 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8051
8052 /* MIN (a, MAX (a, b)) == a. */
8053 if (TREE_CODE (op1) == compl_code
8054 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8055 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8056 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8057
8058 /* MIN (a, MAX (b, a)) == a. */
8059 if (TREE_CODE (op1) == compl_code
8060 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8061 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8062 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8063
8064 return NULL_TREE;
8065 }
8066
8067 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8068 by changing CODE to reduce the magnitude of constants involved in
8069 ARG0 of the comparison.
8070 Returns a canonicalized comparison tree if a simplification was
8071 possible, otherwise returns NULL_TREE.
8072 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8073 valid if signed overflow is undefined. */
8074
8075 static tree
8076 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8077 tree arg0, tree arg1,
8078 bool *strict_overflow_p)
8079 {
8080 enum tree_code code0 = TREE_CODE (arg0);
8081 tree t, cst0 = NULL_TREE;
8082 int sgn0;
8083
8084 /* Match A +- CST code arg1. We can change this only if overflow
8085 is undefined. */
8086 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8087 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8088 /* In principle pointers also have undefined overflow behavior,
8089 but that causes problems elsewhere. */
8090 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8091 && (code0 == MINUS_EXPR
8092 || code0 == PLUS_EXPR)
8093 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8094 return NULL_TREE;
8095
8096 /* Identify the constant in arg0 and its sign. */
8097 cst0 = TREE_OPERAND (arg0, 1);
8098 sgn0 = tree_int_cst_sgn (cst0);
8099
8100 /* Overflowed constants and zero will cause problems. */
8101 if (integer_zerop (cst0)
8102 || TREE_OVERFLOW (cst0))
8103 return NULL_TREE;
8104
8105 /* See if we can reduce the magnitude of the constant in
8106 arg0 by changing the comparison code. */
8107 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8108 if (code == LT_EXPR
8109 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8110 code = LE_EXPR;
8111 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8112 else if (code == GT_EXPR
8113 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8114 code = GE_EXPR;
8115 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8116 else if (code == LE_EXPR
8117 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8118 code = LT_EXPR;
8119 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8120 else if (code == GE_EXPR
8121 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8122 code = GT_EXPR;
8123 else
8124 return NULL_TREE;
8125 *strict_overflow_p = true;
8126
8127 /* Now build the constant reduced in magnitude. But not if that
8128 would produce one outside of its type's range.  */
8129 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8130 && ((sgn0 == 1
8131 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8132 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8133 || (sgn0 == -1
8134 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8135 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8136 return NULL_TREE;
8137
8138 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8139 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8140 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8141 t = fold_convert (TREE_TYPE (arg1), t);
8142
8143 return fold_build2_loc (loc, code, type, t, arg1);
8144 }
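/* For illustration (signed x, overflow undefined): x - 5 < y is
   canonicalized to x - 4 <= y and x + 5 > y to x + 4 >= y; each step
   shrinks the magnitude of the constant by one while preserving the
   comparison.  */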
8145
8146 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8147 overflow further. Try to decrease the magnitude of constants involved
8148 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8149 and put sole constants at the second argument position.
8150 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8151
8152 static tree
8153 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8154 tree arg0, tree arg1)
8155 {
8156 tree t;
8157 bool strict_overflow_p;
8158 const char * const warnmsg = G_("assuming signed overflow does not occur "
8159 "when reducing constant in comparison");
8160
8161 /* Try canonicalization by simplifying arg0. */
8162 strict_overflow_p = false;
8163 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8164 &strict_overflow_p);
8165 if (t)
8166 {
8167 if (strict_overflow_p)
8168 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8169 return t;
8170 }
8171
8172 /* Try canonicalization by simplifying arg1 using the swapped
8173 comparison. */
8174 code = swap_tree_comparison (code);
8175 strict_overflow_p = false;
8176 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8177 &strict_overflow_p);
8178 if (t && strict_overflow_p)
8179 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8180 return t;
8181 }
8182
8183 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8184 space. This is used to avoid issuing overflow warnings for
8185 expressions like &p->x, which cannot wrap.  */
8186
8187 static bool
8188 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8189 {
8190 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8191 return true;
8192
8193 if (bitpos < 0)
8194 return true;
8195
8196 wide_int wi_offset;
8197 int precision = TYPE_PRECISION (TREE_TYPE (base));
8198 if (offset == NULL_TREE)
8199 wi_offset = wi::zero (precision);
8200 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8201 return true;
8202 else
8203 wi_offset = offset;
8204
8205 bool overflow;
8206 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8207 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8208 if (overflow)
8209 return true;
8210
8211 if (!wi::fits_uhwi_p (total))
8212 return true;
8213
8214 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8215 if (size <= 0)
8216 return true;
8217
8218 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8219 array. */
8220 if (TREE_CODE (base) == ADDR_EXPR)
8221 {
8222 HOST_WIDE_INT base_size;
8223
8224 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8225 if (base_size > 0 && size < base_size)
8226 size = base_size;
8227 }
8228
8229 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8230 }
8231
8232 /* Return the HOST_WIDE_INT least significant bits of T, an
8233 INTEGER_CST of sizetype kind.  This properly sign-extends the
8234 constant. */
8235
8236 static HOST_WIDE_INT
8237 size_low_cst (const_tree t)
8238 {
8239 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8240 int prec = TYPE_PRECISION (TREE_TYPE (t));
8241 if (prec < HOST_BITS_PER_WIDE_INT)
8242 return sext_hwi (w, prec);
8243 return w;
8244 }
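/* For illustration (assuming a 64-bit HOST_WIDE_INT): if T has a
   32-bit type and its low element is 0xffffffff, the result is
   sign-extended to (HOST_WIDE_INT) -1 rather than left as a large
   positive value.  */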
8245
8246 /* Subroutine of fold_binary. This routine performs all of the
8247 transformations that are common to the equality/inequality
8248 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8249 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8250 fold_binary should go through fold_binary.  Fold a comparison with
8251 tree code CODE and type TYPE with operands OP0 and OP1. Return
8252 the folded comparison or NULL_TREE. */
8253
8254 static tree
8255 fold_comparison (location_t loc, enum tree_code code, tree type,
8256 tree op0, tree op1)
8257 {
8258 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8259 tree arg0, arg1, tem;
8260
8261 arg0 = op0;
8262 arg1 = op1;
8263
8264 STRIP_SIGN_NOPS (arg0);
8265 STRIP_SIGN_NOPS (arg1);
8266
8267 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
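  /* For illustration (signed x, overflow undefined): x + 10 < 15
     becomes x < 5.  In the overflow case handled below, x + 1 <= INT_MIN
     folds directly to false, since without wraparound x + 1 can
     never reach INT_MIN.  */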
8268 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8269 && (equality_code
8270 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8271 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8272 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8273 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8274 && TREE_CODE (arg1) == INTEGER_CST
8275 && !TREE_OVERFLOW (arg1))
8276 {
8277 const enum tree_code
8278 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8279 tree const1 = TREE_OPERAND (arg0, 1);
8280 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8281 tree variable = TREE_OPERAND (arg0, 0);
8282 tree new_const = int_const_binop (reverse_op, const2, const1);
8283
8284 /* If the constant operation overflowed, this can be
8285 simplified as a comparison against INT_MAX/INT_MIN. */
8286 if (TREE_OVERFLOW (new_const)
8287 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8288 {
8289 int const1_sgn = tree_int_cst_sgn (const1);
8290 enum tree_code code2 = code;
8291
8292 /* Get the sign of the constant on the lhs if the
8293 operation were VARIABLE + CONST1. */
8294 if (TREE_CODE (arg0) == MINUS_EXPR)
8295 const1_sgn = -const1_sgn;
8296
8297 /* The sign of the constant determines if we overflowed
8298 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8299 Canonicalize to the INT_MIN overflow by swapping the comparison
8300 if necessary. */
8301 if (const1_sgn == -1)
8302 code2 = swap_tree_comparison (code);
8303
8304 /* We now can look at the canonicalized case
8305 VARIABLE + 1 CODE2 INT_MIN
8306 and decide on the result. */
8307 switch (code2)
8308 {
8309 case EQ_EXPR:
8310 case LT_EXPR:
8311 case LE_EXPR:
8312 return
8313 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8314
8315 case NE_EXPR:
8316 case GE_EXPR:
8317 case GT_EXPR:
8318 return
8319 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8320
8321 default:
8322 gcc_unreachable ();
8323 }
8324 }
8325 else
8326 {
8327 if (!equality_code)
8328 fold_overflow_warning ("assuming signed overflow does not occur "
8329 "when changing X +- C1 cmp C2 to "
8330 "X cmp C2 -+ C1",
8331 WARN_STRICT_OVERFLOW_COMPARISON);
8332 return fold_build2_loc (loc, code, type, variable, new_const);
8333 }
8334 }
8335
8336 /* For comparisons of pointers we can decompose it to a compile time
8337 comparison of the base objects and the offsets into the object.
8338 This requires at least one operand being an ADDR_EXPR or a
8339 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8340 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8341 && (TREE_CODE (arg0) == ADDR_EXPR
8342 || TREE_CODE (arg1) == ADDR_EXPR
8343 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8344 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8345 {
8346 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8347 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8348 machine_mode mode;
8349 int volatilep, unsignedp;
8350 bool indirect_base0 = false, indirect_base1 = false;
8351
8352 /* Get base and offset for the access. Strip ADDR_EXPR for
8353 get_inner_reference, but put it back by stripping INDIRECT_REF
8354 off the base object if possible. indirect_baseN will be true
8355 if baseN is not an address but refers to the object itself. */
8356 base0 = arg0;
8357 if (TREE_CODE (arg0) == ADDR_EXPR)
8358 {
8359 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8360 &bitsize, &bitpos0, &offset0, &mode,
8361 &unsignedp, &volatilep, false);
8362 if (TREE_CODE (base0) == INDIRECT_REF)
8363 base0 = TREE_OPERAND (base0, 0);
8364 else
8365 indirect_base0 = true;
8366 }
8367 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8368 {
8369 base0 = TREE_OPERAND (arg0, 0);
8370 STRIP_SIGN_NOPS (base0);
8371 if (TREE_CODE (base0) == ADDR_EXPR)
8372 {
8373 base0 = TREE_OPERAND (base0, 0);
8374 indirect_base0 = true;
8375 }
8376 offset0 = TREE_OPERAND (arg0, 1);
8377 if (tree_fits_shwi_p (offset0))
8378 {
8379 HOST_WIDE_INT off = size_low_cst (offset0);
8380 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8381 * BITS_PER_UNIT)
8382 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8383 {
8384 bitpos0 = off * BITS_PER_UNIT;
8385 offset0 = NULL_TREE;
8386 }
8387 }
8388 }
8389
8390 base1 = arg1;
8391 if (TREE_CODE (arg1) == ADDR_EXPR)
8392 {
8393 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8394 &bitsize, &bitpos1, &offset1, &mode,
8395 &unsignedp, &volatilep, false);
8396 if (TREE_CODE (base1) == INDIRECT_REF)
8397 base1 = TREE_OPERAND (base1, 0);
8398 else
8399 indirect_base1 = true;
8400 }
8401 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8402 {
8403 base1 = TREE_OPERAND (arg1, 0);
8404 STRIP_SIGN_NOPS (base1);
8405 if (TREE_CODE (base1) == ADDR_EXPR)
8406 {
8407 base1 = TREE_OPERAND (base1, 0);
8408 indirect_base1 = true;
8409 }
8410 offset1 = TREE_OPERAND (arg1, 1);
8411 if (tree_fits_shwi_p (offset1))
8412 {
8413 HOST_WIDE_INT off = size_low_cst (offset1);
8414 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8415 * BITS_PER_UNIT)
8416 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8417 {
8418 bitpos1 = off * BITS_PER_UNIT;
8419 offset1 = NULL_TREE;
8420 }
8421 }
8422 }
8423
8424 /* If we have equivalent bases we might be able to simplify. */
8425 if (indirect_base0 == indirect_base1
8426 && operand_equal_p (base0, base1,
8427 indirect_base0 ? OEP_ADDRESS_OF : 0))
8428 {
8429 /* We can fold this expression to a constant if the non-constant
8430 offset parts are equal. */
8431 if ((offset0 == offset1
8432 || (offset0 && offset1
8433 && operand_equal_p (offset0, offset1, 0)))
8434 && (code == EQ_EXPR
8435 || code == NE_EXPR
8436 || (indirect_base0 && DECL_P (base0))
8437 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8438
8439 {
8440 if (!equality_code
8441 && bitpos0 != bitpos1
8442 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8443 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8444 fold_overflow_warning (("assuming pointer wraparound does not "
8445 "occur when comparing P +- C1 with "
8446 "P +- C2"),
8447 WARN_STRICT_OVERFLOW_CONDITIONAL);
8448
8449 switch (code)
8450 {
8451 case EQ_EXPR:
8452 return constant_boolean_node (bitpos0 == bitpos1, type);
8453 case NE_EXPR:
8454 return constant_boolean_node (bitpos0 != bitpos1, type);
8455 case LT_EXPR:
8456 return constant_boolean_node (bitpos0 < bitpos1, type);
8457 case LE_EXPR:
8458 return constant_boolean_node (bitpos0 <= bitpos1, type);
8459 case GE_EXPR:
8460 return constant_boolean_node (bitpos0 >= bitpos1, type);
8461 case GT_EXPR:
8462 return constant_boolean_node (bitpos0 > bitpos1, type);
8463 default:;
8464 }
8465 }
8466 /* We can simplify the comparison to a comparison of the variable
8467 offset parts if the constant offset parts are equal.
8468 Be careful to use signed sizetype here because otherwise we
8469 mess with array offsets in the wrong way. This is possible
8470 because pointer arithmetic is restricted to remain within an
8471 object and overflow on pointer differences is undefined as of
8472 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8473 else if (bitpos0 == bitpos1
8474 && (equality_code
8475 || (indirect_base0 && DECL_P (base0))
8476 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8477 {
8478 /* By converting to signed sizetype we cover middle-end pointer
8479 arithmetic, which operates on unsigned pointer types of sizetype
8480 width, and ARRAY_REF offsets, which are properly sign or
8481 zero extended from their type in case it is narrower than
8482 sizetype. */
8483 if (offset0 == NULL_TREE)
8484 offset0 = build_int_cst (ssizetype, 0);
8485 else
8486 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8487 if (offset1 == NULL_TREE)
8488 offset1 = build_int_cst (ssizetype, 0);
8489 else
8490 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8491
8492 if (!equality_code
8493 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8494 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8495 fold_overflow_warning (("assuming pointer wraparound does not "
8496 "occur when comparing P +- C1 with "
8497 "P +- C2"),
8498 WARN_STRICT_OVERFLOW_COMPARISON);
8499
8500 return fold_build2_loc (loc, code, type, offset0, offset1);
8501 }
8502 }
8503 /* For equal offsets we can simplify to a comparison of the
8504 base addresses. */
8505 else if (bitpos0 == bitpos1
8506 && (indirect_base0
8507 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8508 && (indirect_base1
8509 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8510 && ((offset0 == offset1)
8511 || (offset0 && offset1
8512 && operand_equal_p (offset0, offset1, 0))))
8513 {
8514 if (indirect_base0)
8515 base0 = build_fold_addr_expr_loc (loc, base0);
8516 if (indirect_base1)
8517 base1 = build_fold_addr_expr_loc (loc, base1);
8518 return fold_build2_loc (loc, code, type, base0, base1);
8519 }
8520 }
8521
8522 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8523 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8524 the resulting offset is smaller in absolute value than the
8525 original one and has the same sign. */
8526 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8527 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8528 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8529 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8530 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8531 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8532 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8533 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8534 {
8535 tree const1 = TREE_OPERAND (arg0, 1);
8536 tree const2 = TREE_OPERAND (arg1, 1);
8537 tree variable1 = TREE_OPERAND (arg0, 0);
8538 tree variable2 = TREE_OPERAND (arg1, 0);
8539 tree cst;
8540 const char * const warnmsg = G_("assuming signed overflow does not "
8541 "occur when combining constants around "
8542 "a comparison");
8543
8544 /* Put the constant on the side where it doesn't overflow and is
8545 of lower absolute value and of the same sign as before.  */
8546 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8547 ? MINUS_EXPR : PLUS_EXPR,
8548 const2, const1);
8549 if (!TREE_OVERFLOW (cst)
8550 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8551 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8552 {
8553 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8554 return fold_build2_loc (loc, code, type,
8555 variable1,
8556 fold_build2_loc (loc, TREE_CODE (arg1),
8557 TREE_TYPE (arg1),
8558 variable2, cst));
8559 }
8560
8561 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8562 ? MINUS_EXPR : PLUS_EXPR,
8563 const1, const2);
8564 if (!TREE_OVERFLOW (cst)
8565 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8566 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8567 {
8568 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8569 return fold_build2_loc (loc, code, type,
8570 fold_build2_loc (loc, TREE_CODE (arg0),
8571 TREE_TYPE (arg0),
8572 variable1, cst),
8573 variable2);
8574 }
8575 }
8576
8577 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8578 if (tem)
8579 return tem;
8580
8581 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8582 constant, we can simplify it. */
8583 if (TREE_CODE (arg1) == INTEGER_CST
8584 && (TREE_CODE (arg0) == MIN_EXPR
8585 || TREE_CODE (arg0) == MAX_EXPR)
8586 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8587 {
8588 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8589 if (tem)
8590 return tem;
8591 }
8592
8593 /* If we are comparing an expression that just has comparisons
8594 of two integer values, arithmetic expressions of those comparisons,
8595 and constants, we can simplify it. There are only three cases
8596 to check: the two values can either be equal, the first can be
8597 greater, or the second can be greater. Fold the expression for
8598 those three values. Since each value must be 0 or 1, we have
8599 eight possibilities, each of which corresponds to the constant 0
8600 or 1 or one of the six possible comparisons.
8601
8602 This handles common cases like (a > b) == 0 but also handles
8603 expressions like ((x > y) - (y > x)) > 0, which supposedly
8604 occur in macroized code. */
8605
8606 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8607 {
8608 tree cval1 = 0, cval2 = 0;
8609 int save_p = 0;
8610
8611 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8612 /* Don't handle degenerate cases here; they should already
8613 have been handled anyway. */
8614 && cval1 != 0 && cval2 != 0
8615 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8616 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8617 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8618 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8619 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8620 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8621 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8622 {
8623 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8624 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8625
8626 /* We can't just pass T to eval_subst in case cval1 or cval2
8627 was the same as ARG1. */
8628
8629 tree high_result
8630 = fold_build2_loc (loc, code, type,
8631 eval_subst (loc, arg0, cval1, maxval,
8632 cval2, minval),
8633 arg1);
8634 tree equal_result
8635 = fold_build2_loc (loc, code, type,
8636 eval_subst (loc, arg0, cval1, maxval,
8637 cval2, maxval),
8638 arg1);
8639 tree low_result
8640 = fold_build2_loc (loc, code, type,
8641 eval_subst (loc, arg0, cval1, minval,
8642 cval2, maxval),
8643 arg1);
8644
8645 /* All three of these results should be 0 or 1. Confirm they are.
8646 Then use those values to select the proper code to use. */
8647
8648 if (TREE_CODE (high_result) == INTEGER_CST
8649 && TREE_CODE (equal_result) == INTEGER_CST
8650 && TREE_CODE (low_result) == INTEGER_CST)
8651 {
8652 /* Make a 3-bit mask with the high-order bit being the
8653 value for `>', the next for '=', and the low for '<'. */
8654 switch ((integer_onep (high_result) * 4)
8655 + (integer_onep (equal_result) * 2)
8656 + integer_onep (low_result))
8657 {
8658 case 0:
8659 /* Always false. */
8660 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8661 case 1:
8662 code = LT_EXPR;
8663 break;
8664 case 2:
8665 code = EQ_EXPR;
8666 break;
8667 case 3:
8668 code = LE_EXPR;
8669 break;
8670 case 4:
8671 code = GT_EXPR;
8672 break;
8673 case 5:
8674 code = NE_EXPR;
8675 break;
8676 case 6:
8677 code = GE_EXPR;
8678 break;
8679 case 7:
8680 /* Always true. */
8681 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8682 }
8683
8684 if (save_p)
8685 {
8686 tem = save_expr (build2 (code, type, cval1, cval2));
8687 SET_EXPR_LOCATION (tem, loc);
8688 return tem;
8689 }
8690 return fold_build2_loc (loc, code, type, cval1, cval2);
8691 }
8692 }
8693 }
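/* Editorial sketch, not from the original source: for arg0 = (a > b)
   and arg1 = 0 under EQ_EXPR, the three trial substitutions give
   high_result = 0 (a maximal, b minimal), equal_result = 1 and
   low_result = 1, so the 3-bit mask is 0*4 + 1*2 + 1 = 3 and the
   whole expression (a > b) == 0 folds to a <= b.  */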
8694
8695 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8696 into a single range test. */
8697 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8698 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8699 && TREE_CODE (arg1) == INTEGER_CST
8700 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8701 && !integer_zerop (TREE_OPERAND (arg0, 1))
8702 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8703 && !TREE_OVERFLOW (arg1))
8704 {
8705 tem = fold_div_compare (loc, code, type, arg0, arg1);
8706 if (tem != NULL_TREE)
8707 return tem;
8708 }
8709
8710 return NULL_TREE;
8711 }
8712
8713
8714 /* Subroutine of fold_binary. Optimize complex multiplications of the
8715 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8716 argument EXPR represents the expression "z" of type TYPE. */
8717
8718 static tree
8719 fold_mult_zconjz (location_t loc, tree type, tree expr)
8720 {
8721 tree itype = TREE_TYPE (type);
8722 tree rpart, ipart, tem;
8723
8724 if (TREE_CODE (expr) == COMPLEX_EXPR)
8725 {
8726 rpart = TREE_OPERAND (expr, 0);
8727 ipart = TREE_OPERAND (expr, 1);
8728 }
8729 else if (TREE_CODE (expr) == COMPLEX_CST)
8730 {
8731 rpart = TREE_REALPART (expr);
8732 ipart = TREE_IMAGPART (expr);
8733 }
8734 else
8735 {
8736 expr = save_expr (expr);
8737 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8738 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8739 }
8740
8741 rpart = save_expr (rpart);
8742 ipart = save_expr (ipart);
8743 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8744 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8745 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8746 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8747 build_zero_cst (itype));
8748 }
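/* Editorial sketch, not part of fold-const.c: the scalar identity the
   folder above exploits, written with C99 complex arithmetic.  For
   z = a + b*i, z * conj(z) equals a*a + b*b with a zero imaginary
   part.  The function name is hypothetical.  */
#include <complex.h>

static double _Complex
zconjz_example (double _Complex z)
{
  double a = creal (z), b = cimag (z);
  /* Same shape as the folded tree: PLUS (MULT (r, r), MULT (i, i))
     paired with a zero imaginary component.  */
  return a * a + b * b;
}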
8749
8750
8751 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8752 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8753
8754 static bool
8755 vec_cst_ctor_to_array (tree arg, tree *elts)
8756 {
8757 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8758
8759 if (TREE_CODE (arg) == VECTOR_CST)
8760 {
8761 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8762 elts[i] = VECTOR_CST_ELT (arg, i);
8763 }
8764 else if (TREE_CODE (arg) == CONSTRUCTOR)
8765 {
8766 constructor_elt *elt;
8767
8768 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8769 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8770 return false;
8771 else
8772 elts[i] = elt->value;
8773 }
8774 else
8775 return false;
8776 for (; i < nelts; i++)
8777 elts[i]
8778 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8779 return true;
8780 }
8781
8782 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8783 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8784 NULL_TREE otherwise. */
8785
8786 static tree
8787 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8788 {
8789 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8790 tree *elts;
8791 bool need_ctor = false;
8792
8793 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8794 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8795 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8796 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8797 return NULL_TREE;
8798
8799 elts = XALLOCAVEC (tree, nelts * 3);
8800 if (!vec_cst_ctor_to_array (arg0, elts)
8801 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8802 return NULL_TREE;
8803
8804 for (i = 0; i < nelts; i++)
8805 {
8806 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8807 need_ctor = true;
8808 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8809 }
8810
8811 if (need_ctor)
8812 {
8813 vec<constructor_elt, va_gc> *v;
8814 vec_alloc (v, nelts);
8815 for (i = 0; i < nelts; i++)
8816 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8817 return build_constructor (type, v);
8818 }
8819 else
8820 return build_vector (type, &elts[2 * nelts]);
8821 }
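/* Editorial sketch, not part of fold-const.c: the selection rule
   fold_vec_perm implements, restated on plain arrays.  Selector
   values below NELTS index the first operand, values in
   [NELTS, 2*NELTS) index the second.  The function name is
   hypothetical.  */
static void
vec_perm_example (const int *arg0, const int *arg1,
                  const unsigned char *sel, int *out, unsigned int nelts)
{
  for (unsigned int i = 0; i < nelts; i++)
    out[i] = sel[i] < nelts ? arg0[sel[i]] : arg1[sel[i] - nelts];
}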
8822
8823 /* Try to fold a pointer difference of type TYPE between two address
8824 expressions of array references AREF0 and AREF1 using location LOC.
8825 Return a simplified expression for the difference or NULL_TREE. */
8826
8827 static tree
8828 fold_addr_of_array_ref_difference (location_t loc, tree type,
8829 tree aref0, tree aref1)
8830 {
8831 tree base0 = TREE_OPERAND (aref0, 0);
8832 tree base1 = TREE_OPERAND (aref1, 0);
8833 tree base_offset = build_int_cst (type, 0);
8834
8835 /* If the bases are array references as well, recurse. If the bases
8836 are pointer indirections compute the difference of the pointers.
8837 If the bases are equal, we are set. */
8838 if ((TREE_CODE (base0) == ARRAY_REF
8839 && TREE_CODE (base1) == ARRAY_REF
8840 && (base_offset
8841 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8842 || (INDIRECT_REF_P (base0)
8843 && INDIRECT_REF_P (base1)
8844 && (base_offset
8845 = fold_binary_loc (loc, MINUS_EXPR, type,
8846 fold_convert (type, TREE_OPERAND (base0, 0)),
8847 fold_convert (type,
8848 TREE_OPERAND (base1, 0)))))
8849 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8850 {
8851 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8852 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8853 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8854 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8855 return fold_build2_loc (loc, PLUS_EXPR, type,
8856 base_offset,
8857 fold_build2_loc (loc, MULT_EXPR, type,
8858 diff, esz));
8859 }
8860 return NULL_TREE;
8861 }
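/* Editorial sketch, not part of fold-const.c: the identity the helper
   above folds.  In C, &a[i] - &a[j] is i - j in units of the element
   type; the folder produces the equivalent
   base_offset + (i - j) * element_size form.  The function name is
   hypothetical.  */
#include <stddef.h>

static ptrdiff_t
aref_diff_example (int *a, ptrdiff_t i, ptrdiff_t j)
{
  return &a[i] - &a[j];  /* folds to i - j */
}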
8862
8863 /* If the real or vector real constant CST of type TYPE has an exact
8864 inverse, return it, else return NULL. */
8865
8866 tree
8867 exact_inverse (tree type, tree cst)
8868 {
8869 REAL_VALUE_TYPE r;
8870 tree unit_type, *elts;
8871 machine_mode mode;
8872 unsigned vec_nelts, i;
8873
8874 switch (TREE_CODE (cst))
8875 {
8876 case REAL_CST:
8877 r = TREE_REAL_CST (cst);
8878
8879 if (exact_real_inverse (TYPE_MODE (type), &r))
8880 return build_real (type, r);
8881
8882 return NULL_TREE;
8883
8884 case VECTOR_CST:
8885 vec_nelts = VECTOR_CST_NELTS (cst);
8886 elts = XALLOCAVEC (tree, vec_nelts);
8887 unit_type = TREE_TYPE (type);
8888 mode = TYPE_MODE (unit_type);
8889
8890 for (i = 0; i < vec_nelts; i++)
8891 {
8892 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8893 if (!exact_real_inverse (mode, &r))
8894 return NULL_TREE;
8895 elts[i] = build_real (unit_type, r);
8896 }
8897
8898 return build_vector (type, elts);
8899
8900 default:
8901 return NULL_TREE;
8902 }
8903 }
8904
8905 /* Mask out the tz least significant bits of X of type TYPE where
8906 tz is the number of trailing zeroes in Y. */
8907 static wide_int
8908 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8909 {
8910 int tz = wi::ctz (y);
8911 if (tz > 0)
8912 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8913 return x;
8914 }
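/* Editorial sketch, not part of fold-const.c: mask_with_tz restated on
   plain unsigned values, assuming Y is nonzero so that __builtin_ctz
   is defined.  The tz least significant bits of X are cleared.  The
   function name is hypothetical.  */
static unsigned int
mask_with_tz_example (unsigned int x, unsigned int y)
{
  int tz = __builtin_ctz (y);           /* trailing zeros of y */
  return tz > 0 ? x & ~((1u << tz) - 1u) : x;
}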
8915
8916 /* Return true when T is an address and is known to be nonzero.
8917 For floating point we further ensure that T is not denormal.
8918 Similar logic is present in nonzero_address in rtlanal.h.
8919
8920 If the return value is based on the assumption that signed overflow
8921 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8922 change *STRICT_OVERFLOW_P. */
8923
8924 static bool
8925 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8926 {
8927 tree type = TREE_TYPE (t);
8928 enum tree_code code;
8929
8930 /* Doing something useful for floating point would need more work. */
8931 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8932 return false;
8933
8934 code = TREE_CODE (t);
8935 switch (TREE_CODE_CLASS (code))
8936 {
8937 case tcc_unary:
8938 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8939 strict_overflow_p);
8940 case tcc_binary:
8941 case tcc_comparison:
8942 return tree_binary_nonzero_warnv_p (code, type,
8943 TREE_OPERAND (t, 0),
8944 TREE_OPERAND (t, 1),
8945 strict_overflow_p);
8946 case tcc_constant:
8947 case tcc_declaration:
8948 case tcc_reference:
8949 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8950
8951 default:
8952 break;
8953 }
8954
8955 switch (code)
8956 {
8957 case TRUTH_NOT_EXPR:
8958 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8959 strict_overflow_p);
8960
8961 case TRUTH_AND_EXPR:
8962 case TRUTH_OR_EXPR:
8963 case TRUTH_XOR_EXPR:
8964 return tree_binary_nonzero_warnv_p (code, type,
8965 TREE_OPERAND (t, 0),
8966 TREE_OPERAND (t, 1),
8967 strict_overflow_p);
8968
8969 case COND_EXPR:
8970 case CONSTRUCTOR:
8971 case OBJ_TYPE_REF:
8972 case ASSERT_EXPR:
8973 case ADDR_EXPR:
8974 case WITH_SIZE_EXPR:
8975 case SSA_NAME:
8976 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8977
8978 case COMPOUND_EXPR:
8979 case MODIFY_EXPR:
8980 case BIND_EXPR:
8981 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
8982 strict_overflow_p);
8983
8984 case SAVE_EXPR:
8985 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
8986 strict_overflow_p);
8987
8988 case CALL_EXPR:
8989 {
8990 tree fndecl = get_callee_fndecl (t);
8991 if (!fndecl) return false;
8992 if (flag_delete_null_pointer_checks && !flag_check_new
8993 && DECL_IS_OPERATOR_NEW (fndecl)
8994 && !TREE_NOTHROW (fndecl))
8995 return true;
8996 if (flag_delete_null_pointer_checks
8997 && lookup_attribute ("returns_nonnull",
8998 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
8999 return true;
9000 return alloca_call_p (t);
9001 }
9002
9003 default:
9004 break;
9005 }
9006 return false;
9007 }
9008
9009 /* Return true when T is an address and is known to be nonzero.
9010 Handle warnings about undefined signed overflow. */
9011
9012 static bool
9013 tree_expr_nonzero_p (tree t)
9014 {
9015 bool ret, strict_overflow_p;
9016
9017 strict_overflow_p = false;
9018 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9019 if (strict_overflow_p)
9020 fold_overflow_warning (("assuming signed overflow does not occur when "
9021 "determining that expression is always "
9022 "non-zero"),
9023 WARN_STRICT_OVERFLOW_MISC);
9024 return ret;
9025 }
9026
9027 /* Fold a binary expression of code CODE and type TYPE with operands
9028 OP0 and OP1. LOC is the location of the resulting expression.
9029 Return the folded expression if folding is successful. Otherwise,
9030 return NULL_TREE. */
9031
9032 tree
9033 fold_binary_loc (location_t loc,
9034 enum tree_code code, tree type, tree op0, tree op1)
9035 {
9036 enum tree_code_class kind = TREE_CODE_CLASS (code);
9037 tree arg0, arg1, tem;
9038 tree t1 = NULL_TREE;
9039 bool strict_overflow_p;
9040 unsigned int prec;
9041
9042 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9043 && TREE_CODE_LENGTH (code) == 2
9044 && op0 != NULL_TREE
9045 && op1 != NULL_TREE);
9046
9047 arg0 = op0;
9048 arg1 = op1;
9049
9050 /* Strip any conversions that don't change the mode. This is
9051 safe for every expression, except for a comparison expression
9052 because its signedness is derived from its operands. So, in
9053 the latter case, only strip conversions that don't change the
9054 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9055 preserved.
9056
9057 Note that this is done as an internal manipulation within the
9058 constant folder, in order to find the simplest representation
9059 of the arguments so that their form can be studied. In any
9060 case, the appropriate type conversions should be put back in
9061 the tree that will get out of the constant folder. */
9062
9063 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9064 {
9065 STRIP_SIGN_NOPS (arg0);
9066 STRIP_SIGN_NOPS (arg1);
9067 }
9068 else
9069 {
9070 STRIP_NOPS (arg0);
9071 STRIP_NOPS (arg1);
9072 }
9073
9074 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9075 constant but we can't do arithmetic on them. */
9076 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9077 {
9078 tem = const_binop (code, type, arg0, arg1);
9079 if (tem != NULL_TREE)
9080 {
9081 if (TREE_TYPE (tem) != type)
9082 tem = fold_convert_loc (loc, type, tem);
9083 return tem;
9084 }
9085 }
9086
9087 /* If this is a commutative operation, and ARG0 is a constant, move it
9088 to ARG1 to reduce the number of tests below. */
9089 if (commutative_tree_code (code)
9090 && tree_swap_operands_p (arg0, arg1, true))
9091 return fold_build2_loc (loc, code, type, op1, op0);
9092
9093 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9094 to ARG1 to reduce the number of tests below. */
9095 if (kind == tcc_comparison
9096 && tree_swap_operands_p (arg0, arg1, true))
9097 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9098
9099 tem = generic_simplify (loc, code, type, op0, op1);
9100 if (tem)
9101 return tem;
9102
9103 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9104
9105 First check for cases where an arithmetic operation is applied to a
9106 compound, conditional, or comparison operation. Push the arithmetic
9107 operation inside the compound or conditional to see if any folding
9108 can then be done. Convert comparison to conditional for this purpose.
9109 This also optimizes non-constant cases that used to be done in
9110 expand_expr.
9111
9112 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9113 one of the operands is a comparison and the other is a comparison, a
9114 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9115 code below would make the expression more complex. Change it to a
9116 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9117 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9118
9119 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9120 || code == EQ_EXPR || code == NE_EXPR)
9121 && TREE_CODE (type) != VECTOR_TYPE
9122 && ((truth_value_p (TREE_CODE (arg0))
9123 && (truth_value_p (TREE_CODE (arg1))
9124 || (TREE_CODE (arg1) == BIT_AND_EXPR
9125 && integer_onep (TREE_OPERAND (arg1, 1)))))
9126 || (truth_value_p (TREE_CODE (arg1))
9127 && (truth_value_p (TREE_CODE (arg0))
9128 || (TREE_CODE (arg0) == BIT_AND_EXPR
9129 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9130 {
9131 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9132 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9133 : TRUTH_XOR_EXPR,
9134 boolean_type_node,
9135 fold_convert_loc (loc, boolean_type_node, arg0),
9136 fold_convert_loc (loc, boolean_type_node, arg1));
9137
9138 if (code == EQ_EXPR)
9139 tem = invert_truthvalue_loc (loc, tem);
9140
9141 return fold_convert_loc (loc, type, tem);
9142 }
9143
9144 if (TREE_CODE_CLASS (code) == tcc_binary
9145 || TREE_CODE_CLASS (code) == tcc_comparison)
9146 {
9147 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9148 {
9149 tem = fold_build2_loc (loc, code, type,
9150 fold_convert_loc (loc, TREE_TYPE (op0),
9151 TREE_OPERAND (arg0, 1)), op1);
9152 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9153 tem);
9154 }
9155 if (TREE_CODE (arg1) == COMPOUND_EXPR
9156 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9157 {
9158 tem = fold_build2_loc (loc, code, type, op0,
9159 fold_convert_loc (loc, TREE_TYPE (op1),
9160 TREE_OPERAND (arg1, 1)));
9161 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9162 tem);
9163 }
9164
9165 if (TREE_CODE (arg0) == COND_EXPR
9166 || TREE_CODE (arg0) == VEC_COND_EXPR
9167 || COMPARISON_CLASS_P (arg0))
9168 {
9169 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9170 arg0, arg1,
9171 /*cond_first_p=*/1);
9172 if (tem != NULL_TREE)
9173 return tem;
9174 }
9175
9176 if (TREE_CODE (arg1) == COND_EXPR
9177 || TREE_CODE (arg1) == VEC_COND_EXPR
9178 || COMPARISON_CLASS_P (arg1))
9179 {
9180 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9181 arg1, arg0,
9182 /*cond_first_p=*/0);
9183 if (tem != NULL_TREE)
9184 return tem;
9185 }
9186 }
9187
9188 switch (code)
9189 {
9190 case MEM_REF:
9191 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9192 if (TREE_CODE (arg0) == ADDR_EXPR
9193 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9194 {
9195 tree iref = TREE_OPERAND (arg0, 0);
9196 return fold_build2 (MEM_REF, type,
9197 TREE_OPERAND (iref, 0),
9198 int_const_binop (PLUS_EXPR, arg1,
9199 TREE_OPERAND (iref, 1)));
9200 }
9201
9202 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9203 if (TREE_CODE (arg0) == ADDR_EXPR
9204 && handled_component_p (TREE_OPERAND (arg0, 0)))
9205 {
9206 tree base;
9207 HOST_WIDE_INT coffset;
9208 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9209 &coffset);
9210 if (!base)
9211 return NULL_TREE;
9212 return fold_build2 (MEM_REF, type,
9213 build_fold_addr_expr (base),
9214 int_const_binop (PLUS_EXPR, arg1,
9215 size_int (coffset)));
9216 }
9217
9218 return NULL_TREE;
9219
9220 case POINTER_PLUS_EXPR:
9221 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9222 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9223 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9224 return fold_convert_loc (loc, type,
9225 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9226 fold_convert_loc (loc, sizetype,
9227 arg1),
9228 fold_convert_loc (loc, sizetype,
9229 arg0)));
9230
9231 return NULL_TREE;
9232
9233 case PLUS_EXPR:
9234 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9235 {
9236 /* X + (X / CST) * -CST is X % CST. */
9237 if (TREE_CODE (arg1) == MULT_EXPR
9238 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9239 && operand_equal_p (arg0,
9240 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9241 {
9242 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9243 tree cst1 = TREE_OPERAND (arg1, 1);
9244 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9245 cst1, cst0);
9246 if (sum && integer_zerop (sum))
9247 return fold_convert_loc (loc, type,
9248 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9249 TREE_TYPE (arg0), arg0,
9250 cst0));
9251 }
9252 }
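/* Editorial sketch, not from the original source: with CST = 16 the
   transform above recognizes

     x + (x / 16) * -16   as   x % 16

   which follows from the truncating-division identity
   (x / n) * n + x % n == x.  */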
9253
9254 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9255 one. Make sure the type is not saturating and has the signedness of
9256 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9257 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9258 if ((TREE_CODE (arg0) == MULT_EXPR
9259 || TREE_CODE (arg1) == MULT_EXPR)
9260 && !TYPE_SATURATING (type)
9261 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9262 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9263 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9264 {
9265 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9266 if (tem)
9267 return tem;
9268 }
9269
9270 if (! FLOAT_TYPE_P (type))
9271 {
9272 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9273 (plus (plus (mult) (mult)) (foo)) so that we can
9274 take advantage of the factoring cases below. */
9275 if (ANY_INTEGRAL_TYPE_P (type)
9276 && TYPE_OVERFLOW_WRAPS (type)
9277 && (((TREE_CODE (arg0) == PLUS_EXPR
9278 || TREE_CODE (arg0) == MINUS_EXPR)
9279 && TREE_CODE (arg1) == MULT_EXPR)
9280 || ((TREE_CODE (arg1) == PLUS_EXPR
9281 || TREE_CODE (arg1) == MINUS_EXPR)
9282 && TREE_CODE (arg0) == MULT_EXPR)))
9283 {
9284 tree parg0, parg1, parg, marg;
9285 enum tree_code pcode;
9286
9287 if (TREE_CODE (arg1) == MULT_EXPR)
9288 parg = arg0, marg = arg1;
9289 else
9290 parg = arg1, marg = arg0;
9291 pcode = TREE_CODE (parg);
9292 parg0 = TREE_OPERAND (parg, 0);
9293 parg1 = TREE_OPERAND (parg, 1);
9294 STRIP_NOPS (parg0);
9295 STRIP_NOPS (parg1);
9296
9297 if (TREE_CODE (parg0) == MULT_EXPR
9298 && TREE_CODE (parg1) != MULT_EXPR)
9299 return fold_build2_loc (loc, pcode, type,
9300 fold_build2_loc (loc, PLUS_EXPR, type,
9301 fold_convert_loc (loc, type,
9302 parg0),
9303 fold_convert_loc (loc, type,
9304 marg)),
9305 fold_convert_loc (loc, type, parg1));
9306 if (TREE_CODE (parg0) != MULT_EXPR
9307 && TREE_CODE (parg1) == MULT_EXPR)
9308 return
9309 fold_build2_loc (loc, PLUS_EXPR, type,
9310 fold_convert_loc (loc, type, parg0),
9311 fold_build2_loc (loc, pcode, type,
9312 fold_convert_loc (loc, type, marg),
9313 fold_convert_loc (loc, type,
9314 parg1)));
9315 }
9316 }
9317 else
9318 {
9319 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9320 to __complex__ ( x, y ). This is not the same for SNaNs or
9321 if signed zeros are involved. */
9322 if (!HONOR_SNANS (element_mode (arg0))
9323 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9324 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9325 {
9326 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9327 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9328 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9329 bool arg0rz = false, arg0iz = false;
9330 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9331 || (arg0i && (arg0iz = real_zerop (arg0i))))
9332 {
9333 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9334 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9335 if (arg0rz && arg1i && real_zerop (arg1i))
9336 {
9337 tree rp = arg1r ? arg1r
9338 : build1 (REALPART_EXPR, rtype, arg1);
9339 tree ip = arg0i ? arg0i
9340 : build1 (IMAGPART_EXPR, rtype, arg0);
9341 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9342 }
9343 else if (arg0iz && arg1r && real_zerop (arg1r))
9344 {
9345 tree rp = arg0r ? arg0r
9346 : build1 (REALPART_EXPR, rtype, arg0);
9347 tree ip = arg1i ? arg1i
9348 : build1 (IMAGPART_EXPR, rtype, arg1);
9349 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9350 }
9351 }
9352 }
9353
9354 if (flag_unsafe_math_optimizations
9355 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9356 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9357 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9358 return tem;
9359
9360 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9361 We associate floats only if the user has specified
9362 -fassociative-math. */
9363 if (flag_associative_math
9364 && TREE_CODE (arg1) == PLUS_EXPR
9365 && TREE_CODE (arg0) != MULT_EXPR)
9366 {
9367 tree tree10 = TREE_OPERAND (arg1, 0);
9368 tree tree11 = TREE_OPERAND (arg1, 1);
9369 if (TREE_CODE (tree11) == MULT_EXPR
9370 && TREE_CODE (tree10) == MULT_EXPR)
9371 {
9372 tree tree0;
9373 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9374 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9375 }
9376 }
9377 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9378 We associate floats only if the user has specified
9379 -fassociative-math. */
9380 if (flag_associative_math
9381 && TREE_CODE (arg0) == PLUS_EXPR
9382 && TREE_CODE (arg1) != MULT_EXPR)
9383 {
9384 tree tree00 = TREE_OPERAND (arg0, 0);
9385 tree tree01 = TREE_OPERAND (arg0, 1);
9386 if (TREE_CODE (tree01) == MULT_EXPR
9387 && TREE_CODE (tree00) == MULT_EXPR)
9388 {
9389 tree tree0;
9390 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9391 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9392 }
9393 }
9394 }
9395
9396 bit_rotate:
9397 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9398 is a rotate of A by C1 bits. */
9399 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9400 is a rotate of A by B bits. */
9401 {
9402 enum tree_code code0, code1;
9403 tree rtype;
9404 code0 = TREE_CODE (arg0);
9405 code1 = TREE_CODE (arg1);
9406 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9407 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9408 && operand_equal_p (TREE_OPERAND (arg0, 0),
9409 TREE_OPERAND (arg1, 0), 0)
9410 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9411 TYPE_UNSIGNED (rtype))
9412 /* Only create rotates in complete modes. Other cases are not
9413 expanded properly. */
9414 && (element_precision (rtype)
9415 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9416 {
9417 tree tree01, tree11;
9418 enum tree_code code01, code11;
9419
9420 tree01 = TREE_OPERAND (arg0, 1);
9421 tree11 = TREE_OPERAND (arg1, 1);
9422 STRIP_NOPS (tree01);
9423 STRIP_NOPS (tree11);
9424 code01 = TREE_CODE (tree01);
9425 code11 = TREE_CODE (tree11);
9426 if (code01 == INTEGER_CST
9427 && code11 == INTEGER_CST
9428 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9429 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9430 {
9431 tem = build2_loc (loc, LROTATE_EXPR,
9432 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9433 TREE_OPERAND (arg0, 0),
9434 code0 == LSHIFT_EXPR
9435 ? TREE_OPERAND (arg0, 1)
9436 : TREE_OPERAND (arg1, 1));
9437 return fold_convert_loc (loc, type, tem);
9438 }
9439 else if (code11 == MINUS_EXPR)
9440 {
9441 tree tree110, tree111;
9442 tree110 = TREE_OPERAND (tree11, 0);
9443 tree111 = TREE_OPERAND (tree11, 1);
9444 STRIP_NOPS (tree110);
9445 STRIP_NOPS (tree111);
9446 if (TREE_CODE (tree110) == INTEGER_CST
9447 && 0 == compare_tree_int (tree110,
9448 element_precision
9449 (TREE_TYPE (TREE_OPERAND
9450 (arg0, 0))))
9451 && operand_equal_p (tree01, tree111, 0))
9452 return
9453 fold_convert_loc (loc, type,
9454 build2 ((code0 == LSHIFT_EXPR
9455 ? LROTATE_EXPR
9456 : RROTATE_EXPR),
9457 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9458 TREE_OPERAND (arg0, 0),
9459 TREE_OPERAND (arg0, 1)));
9460 }
9461 else if (code01 == MINUS_EXPR)
9462 {
9463 tree tree010, tree011;
9464 tree010 = TREE_OPERAND (tree01, 0);
9465 tree011 = TREE_OPERAND (tree01, 1);
9466 STRIP_NOPS (tree010);
9467 STRIP_NOPS (tree011);
9468 if (TREE_CODE (tree010) == INTEGER_CST
9469 && 0 == compare_tree_int (tree010,
9470 element_precision
9471 (TREE_TYPE (TREE_OPERAND
9472 (arg0, 0))))
9473 && operand_equal_p (tree11, tree011, 0))
9474 return fold_convert_loc
9475 (loc, type,
9476 build2 ((code0 != LSHIFT_EXPR
9477 ? LROTATE_EXPR
9478 : RROTATE_EXPR),
9479 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9480 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9481 }
9482 }
9483 }
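/* Editorial sketch, not from the original source: for a 32-bit
   unsigned x both patterns above fire, e.g.

     (x << 3) + (x >> 29)          constant counts, 3 + 29 == 32
     (x << n) + (x >> (32 - n))    variable count

   and each is rewritten as a single LROTATE_EXPR (or RROTATE_EXPR,
   depending on which shift supplies the count).  */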
9484
9485 associate:
9486 /* In most languages, we can't associate operations on floats through
9487 parentheses. Rather than remember where the parentheses were, we
9488 don't associate floats at all, unless the user has specified
9489 -fassociative-math.
9490 And, we need to make sure type is not saturating. */
9491
9492 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9493 && !TYPE_SATURATING (type))
9494 {
9495 tree var0, con0, lit0, minus_lit0;
9496 tree var1, con1, lit1, minus_lit1;
9497 tree atype = type;
9498 bool ok = true;
9499
9500 /* Split both trees into variables, constants, and literals. Then
9501 associate each group together, the constants with literals,
9502 then the result with variables. This increases the chances of
9503 literals being recombined later and of generating relocatable
9504 expressions for the sum of a constant and literal. */
9505 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9506 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9507 code == MINUS_EXPR);
9508
9509 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9510 if (code == MINUS_EXPR)
9511 code = PLUS_EXPR;
9512
9513 /* With undefined overflow prefer doing association in a type
9514 which wraps on overflow, if that is one of the operand types. */
9515 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9516 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9517 {
9518 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9519 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9520 atype = TREE_TYPE (arg0);
9521 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9522 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9523 atype = TREE_TYPE (arg1);
9524 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9525 }
9526
9527 /* With undefined overflow we can only associate constants with one
9528 variable, and constants whose association doesn't overflow. */
9529 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9530 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9531 {
9532 if (var0 && var1)
9533 {
9534 tree tmp0 = var0;
9535 tree tmp1 = var1;
9536 bool one_neg = false;
9537
9538 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9539 {
9540 tmp0 = TREE_OPERAND (tmp0, 0);
9541 one_neg = !one_neg;
9542 }
9543 if (CONVERT_EXPR_P (tmp0)
9544 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9545 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9546 <= TYPE_PRECISION (atype)))
9547 tmp0 = TREE_OPERAND (tmp0, 0);
9548 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9549 {
9550 tmp1 = TREE_OPERAND (tmp1, 0);
9551 one_neg = !one_neg;
9552 }
9553 if (CONVERT_EXPR_P (tmp1)
9554 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9555 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9556 <= TYPE_PRECISION (atype)))
9557 tmp1 = TREE_OPERAND (tmp1, 0);
9558 /* The only case we can still associate with two variables
9559 is if they cancel out. */
9560 if (!one_neg
9561 || !operand_equal_p (tmp0, tmp1, 0))
9562 ok = false;
9563 }
9564 }
9565
9566 /* Only do something if we found more than two objects. Otherwise,
9567 nothing has changed and we risk infinite recursion. */
9568 if (ok
9569 && (2 < ((var0 != 0) + (var1 != 0)
9570 + (con0 != 0) + (con1 != 0)
9571 + (lit0 != 0) + (lit1 != 0)
9572 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9573 {
9574 bool any_overflows = false;
9575 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9576 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9577 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9578 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9579 var0 = associate_trees (loc, var0, var1, code, atype);
9580 con0 = associate_trees (loc, con0, con1, code, atype);
9581 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9582 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9583 code, atype);
9584
9585 /* Preserve the MINUS_EXPR if the negative part of the literal is
9586 greater than the positive part. Otherwise, the multiplicative
9587 folding code (i.e. extract_muldiv) may be fooled when
9588 unsigned constants are subtracted, as in the following
9589 example: ((X*2 + 4) - 8U)/2. */
9590 if (minus_lit0 && lit0)
9591 {
9592 if (TREE_CODE (lit0) == INTEGER_CST
9593 && TREE_CODE (minus_lit0) == INTEGER_CST
9594 && tree_int_cst_lt (lit0, minus_lit0))
9595 {
9596 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9597 MINUS_EXPR, atype);
9598 lit0 = 0;
9599 }
9600 else
9601 {
9602 lit0 = associate_trees (loc, lit0, minus_lit0,
9603 MINUS_EXPR, atype);
9604 minus_lit0 = 0;
9605 }
9606 }
9607
9608 /* Don't introduce overflows through reassociation. */
9609 if (!any_overflows
9610 && ((lit0 && TREE_OVERFLOW_P (lit0))
9611 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9612 return NULL_TREE;
9613
9614 if (minus_lit0)
9615 {
9616 if (con0 == 0)
9617 return
9618 fold_convert_loc (loc, type,
9619 associate_trees (loc, var0, minus_lit0,
9620 MINUS_EXPR, atype));
9621 else
9622 {
9623 con0 = associate_trees (loc, con0, minus_lit0,
9624 MINUS_EXPR, atype);
9625 return
9626 fold_convert_loc (loc, type,
9627 associate_trees (loc, var0, con0,
9628 PLUS_EXPR, atype));
9629 }
9630 }
9631
9632 con0 = associate_trees (loc, con0, lit0, code, atype);
9633 return
9634 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9635 code, atype));
9636 }
9637 }
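/* Editorial sketch, not from the original source: the association
   above rewrites (x + 1) + (y + 2) as (x + y) + 3 by splitting each
   operand into variable, constant and literal parts and recombining
   the literals, subject to the overflow checks when the type does
   not wrap.  */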
9638
9639 return NULL_TREE;
9640
9641 case MINUS_EXPR:
9642 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9643 if (TREE_CODE (arg0) == NEGATE_EXPR
9644 && negate_expr_p (arg1)
9645 && reorder_operands_p (arg0, arg1))
9646 return fold_build2_loc (loc, MINUS_EXPR, type,
9647 fold_convert_loc (loc, type,
9648 negate_expr (arg1)),
9649 fold_convert_loc (loc, type,
9650 TREE_OPERAND (arg0, 0)));
9651
9652 if (! FLOAT_TYPE_P (type))
9653 {
9654 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9655 any power of 2 minus 1. */
9656 if (TREE_CODE (arg0) == BIT_AND_EXPR
9657 && TREE_CODE (arg1) == BIT_AND_EXPR
9658 && operand_equal_p (TREE_OPERAND (arg0, 0),
9659 TREE_OPERAND (arg1, 0), 0))
9660 {
9661 tree mask0 = TREE_OPERAND (arg0, 1);
9662 tree mask1 = TREE_OPERAND (arg1, 1);
9663 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
9664
9665 if (operand_equal_p (tem, mask1, 0))
9666 {
9667 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
9668 TREE_OPERAND (arg0, 0), mask1);
9669 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
9670 }
9671 }
9672 }
9673
9674 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9675 __complex__ ( x, -y ). This is not the same for SNaNs or if
9676 signed zeros are involved. */
9677 if (!HONOR_SNANS (element_mode (arg0))
9678 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9679 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9680 {
9681 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9682 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9683 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9684 bool arg0rz = false, arg0iz = false;
9685 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9686 || (arg0i && (arg0iz = real_zerop (arg0i))))
9687 {
9688 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9689 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9690 if (arg0rz && arg1i && real_zerop (arg1i))
9691 {
9692 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9693 arg1r ? arg1r
9694 : build1 (REALPART_EXPR, rtype, arg1));
9695 tree ip = arg0i ? arg0i
9696 : build1 (IMAGPART_EXPR, rtype, arg0);
9697 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9698 }
9699 else if (arg0iz && arg1r && real_zerop (arg1r))
9700 {
9701 tree rp = arg0r ? arg0r
9702 : build1 (REALPART_EXPR, rtype, arg0);
9703 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9704 arg1i ? arg1i
9705 : build1 (IMAGPART_EXPR, rtype, arg1));
9706 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9707 }
9708 }
9709 }
9710
9711 /* A - B -> A + (-B) if B is easily negatable. */
9712 if (negate_expr_p (arg1)
9713 && !TYPE_OVERFLOW_SANITIZED (type)
9714 && ((FLOAT_TYPE_P (type)
9715 /* Avoid this transformation if B is a positive REAL_CST. */
9716 && (TREE_CODE (arg1) != REAL_CST
9717 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9718 || INTEGRAL_TYPE_P (type)))
9719 return fold_build2_loc (loc, PLUS_EXPR, type,
9720 fold_convert_loc (loc, type, arg0),
9721 fold_convert_loc (loc, type,
9722 negate_expr (arg1)));
9723
9724 /* Fold &a[i] - &a[j] to i-j. */
9725 if (TREE_CODE (arg0) == ADDR_EXPR
9726 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9727 && TREE_CODE (arg1) == ADDR_EXPR
9728 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9729 {
9730 tree tem = fold_addr_of_array_ref_difference (loc, type,
9731 TREE_OPERAND (arg0, 0),
9732 TREE_OPERAND (arg1, 0));
9733 if (tem)
9734 return tem;
9735 }
9736
9737 if (FLOAT_TYPE_P (type)
9738 && flag_unsafe_math_optimizations
9739 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9740 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9741 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9742 return tem;
9743
9744 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9745 one. Make sure the type is not saturating and has the signedness of
9746 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9747 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9748 if ((TREE_CODE (arg0) == MULT_EXPR
9749 || TREE_CODE (arg1) == MULT_EXPR)
9750 && !TYPE_SATURATING (type)
9751 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9752 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9753 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9754 {
9755 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9756 if (tem)
9757 return tem;
9758 }
9759
9760 goto associate;
9761
9762 case MULT_EXPR:
9763 if (! FLOAT_TYPE_P (type))
9764 {
9765 /* Transform x * -C into -x * C if x is easily negatable. */
9766 if (TREE_CODE (arg1) == INTEGER_CST
9767 && tree_int_cst_sgn (arg1) == -1
9768 && negate_expr_p (arg0)
9769 && (tem = negate_expr (arg1)) != arg1
9770 && !TREE_OVERFLOW (tem))
9771 return fold_build2_loc (loc, MULT_EXPR, type,
9772 fold_convert_loc (loc, type,
9773 negate_expr (arg0)),
9774 tem);
9775
9776 /* (A + A) * C -> A * 2 * C */
9777 if (TREE_CODE (arg0) == PLUS_EXPR
9778 && TREE_CODE (arg1) == INTEGER_CST
9779 && operand_equal_p (TREE_OPERAND (arg0, 0),
9780 TREE_OPERAND (arg0, 1), 0))
9781 return fold_build2_loc (loc, MULT_EXPR, type,
9782 omit_one_operand_loc (loc, type,
9783 TREE_OPERAND (arg0, 0),
9784 TREE_OPERAND (arg0, 1)),
9785 fold_build2_loc (loc, MULT_EXPR, type,
9786 build_int_cst (type, 2), arg1));
9787
9788 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9789 sign-changing only. */
9790 if (TREE_CODE (arg1) == INTEGER_CST
9791 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9792 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9793 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9794
9795 strict_overflow_p = false;
9796 if (TREE_CODE (arg1) == INTEGER_CST
9797 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9798 &strict_overflow_p)))
9799 {
9800 if (strict_overflow_p)
9801 fold_overflow_warning (("assuming signed overflow does not "
9802 "occur when simplifying "
9803 "multiplication"),
9804 WARN_STRICT_OVERFLOW_MISC);
9805 return fold_convert_loc (loc, type, tem);
9806 }
9807
9808 /* Optimize z * conj(z) for integer complex numbers. */
9809 if (TREE_CODE (arg0) == CONJ_EXPR
9810 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9811 return fold_mult_zconjz (loc, type, arg1);
9812 if (TREE_CODE (arg1) == CONJ_EXPR
9813 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9814 return fold_mult_zconjz (loc, type, arg0);
9815 }
9816 else
9817 {
9818 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9819 This is not the same for NaNs or if signed zeros are
9820 involved. */
9821 if (!HONOR_NANS (arg0)
9822 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9823 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9824 && TREE_CODE (arg1) == COMPLEX_CST
9825 && real_zerop (TREE_REALPART (arg1)))
9826 {
9827 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9828 if (real_onep (TREE_IMAGPART (arg1)))
9829 return
9830 fold_build2_loc (loc, COMPLEX_EXPR, type,
9831 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9832 rtype, arg0)),
9833 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9834 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9835 return
9836 fold_build2_loc (loc, COMPLEX_EXPR, type,
9837 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9838 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9839 rtype, arg0)));
9840 }
9841
9842 /* Optimize z * conj(z) for floating point complex numbers.
9843 Guarded by flag_unsafe_math_optimizations as non-finite
9844 imaginary components don't produce scalar results. */
9845 if (flag_unsafe_math_optimizations
9846 && TREE_CODE (arg0) == CONJ_EXPR
9847 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9848 return fold_mult_zconjz (loc, type, arg1);
9849 if (flag_unsafe_math_optimizations
9850 && TREE_CODE (arg1) == CONJ_EXPR
9851 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9852 return fold_mult_zconjz (loc, type, arg0);
9853
9854 if (flag_unsafe_math_optimizations)
9855 {
9856
9857 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9858 if (!in_gimple_form
9859 && optimize
9860 && operand_equal_p (arg0, arg1, 0))
9861 {
9862 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9863
9864 if (powfn)
9865 {
9866 tree arg = build_real (type, dconst2);
9867 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9868 }
9869 }
9870 }
9871 }
9872 goto associate;
9873
9874 case BIT_IOR_EXPR:
9875 /* Canonicalize (X & C1) | C2. */
9876 if (TREE_CODE (arg0) == BIT_AND_EXPR
9877 && TREE_CODE (arg1) == INTEGER_CST
9878 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9879 {
9880 int width = TYPE_PRECISION (type), w;
9881 wide_int c1 = TREE_OPERAND (arg0, 1);
9882 wide_int c2 = arg1;
9883
9884 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9885 if ((c1 & c2) == c1)
9886 return omit_one_operand_loc (loc, type, arg1,
9887 TREE_OPERAND (arg0, 0));
9888
9889 wide_int msk = wi::mask (width, false,
9890 TYPE_PRECISION (TREE_TYPE (arg1)));
9891
9892 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9893 if (msk.and_not (c1 | c2) == 0)
9894 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9895 TREE_OPERAND (arg0, 0), arg1);
9896
9897 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9898 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9899 mode which allows further optimizations. */
9900 c1 &= msk;
9901 c2 &= msk;
9902 wide_int c3 = c1.and_not (c2);
9903 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9904 {
9905 wide_int mask = wi::mask (w, false,
9906 TYPE_PRECISION (type));
9907 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9908 {
9909 c3 = mask;
9910 break;
9911 }
9912 }
9913
9914 if (c3 != c1)
9915 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9916 fold_build2_loc (loc, BIT_AND_EXPR, type,
9917 TREE_OPERAND (arg0, 0),
9918 wide_int_to_tree (type,
9919 c3)),
9920 arg1);
9921 }
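/* Editorial sketch, not from the original source: for 32-bit unsigned
   x the rules above give

     (x & 0x0f) | 0xff         ->  0xff               (C1 & C2) == C1
     (x & 0xffffff00) | 0xff   ->  x | 0xff           (C1 | C2) == ~0
     (x & 0x3f) | 0x0f         ->  (x & 0x30) | 0x0f  C1 &= ~C2

   while (x & 0xff) | 0x0f is left alone, because 0xff is itself a
   mask of a whole mode and may enable further optimizations.  */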
9922
9923 /* (X & ~Y) | (~X & Y) is X ^ Y */
9924 if (TREE_CODE (arg0) == BIT_AND_EXPR
9925 && TREE_CODE (arg1) == BIT_AND_EXPR)
9926 {
9927 tree a0, a1, l0, l1, n0, n1;
9928
9929 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
9930 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9931
9932 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9933 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9934
9935 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
9936 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
9937
9938 if ((operand_equal_p (n0, a0, 0)
9939 && operand_equal_p (n1, a1, 0))
9940 || (operand_equal_p (n0, a1, 0)
9941 && operand_equal_p (n1, a0, 0)))
9942 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
9943 }
9944
9945 /* See if this can be simplified into a rotate first. If that
9946 is unsuccessful continue in the association code. */
9947 goto bit_rotate;
9948
9949 case BIT_XOR_EXPR:
9950 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9951 if (TREE_CODE (arg0) == BIT_AND_EXPR
9952 && INTEGRAL_TYPE_P (type)
9953 && integer_onep (TREE_OPERAND (arg0, 1))
9954 && integer_onep (arg1))
9955 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9956 build_zero_cst (TREE_TYPE (arg0)));
9957
9958 /* See if this can be simplified into a rotate first. If that
9959 is unsuccessful continue in the association code. */
9960 goto bit_rotate;
9961
9962 case BIT_AND_EXPR:
9963 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9964 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9965 && INTEGRAL_TYPE_P (type)
9966 && integer_onep (TREE_OPERAND (arg0, 1))
9967 && integer_onep (arg1))
9968 {
9969 tree tem2;
9970 tem = TREE_OPERAND (arg0, 0);
9971 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9972 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9973 tem, tem2);
9974 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9975 build_zero_cst (TREE_TYPE (tem)));
9976 }
9977 /* Fold ~X & 1 as (X & 1) == 0. */
9978 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9979 && INTEGRAL_TYPE_P (type)
9980 && integer_onep (arg1))
9981 {
9982 tree tem2;
9983 tem = TREE_OPERAND (arg0, 0);
9984 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9985 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9986 tem, tem2);
9987 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9988 build_zero_cst (TREE_TYPE (tem)));
9989 }
9990 /* Fold !X & 1 as X == 0. */
9991 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9992 && integer_onep (arg1))
9993 {
9994 tem = TREE_OPERAND (arg0, 0);
9995 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9996 build_zero_cst (TREE_TYPE (tem)));
9997 }
9998
9999 /* Fold (X ^ Y) & Y as ~X & Y. */
10000 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10001 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10002 {
10003 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10004 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10005 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10006 fold_convert_loc (loc, type, arg1));
10007 }
10008 /* Fold (X ^ Y) & X as ~Y & X. */
10009 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10010 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10011 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10012 {
10013 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10014 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10015 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10016 fold_convert_loc (loc, type, arg1));
10017 }
10018 /* Fold X & (X ^ Y) as X & ~Y. */
10019 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10020 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10021 {
10022 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10023 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10024 fold_convert_loc (loc, type, arg0),
10025 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10026 }
10027 /* Fold X & (Y ^ X) as ~Y & X. */
10028 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10029 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10030 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10031 {
10032 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10033 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10034 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10035 fold_convert_loc (loc, type, arg0));
10036 }
10037
10038 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10039 multiple of 1 << CST. */
10040 if (TREE_CODE (arg1) == INTEGER_CST)
10041 {
10042 wide_int cst1 = arg1;
10043 wide_int ncst1 = -cst1;
10044 if ((cst1 & ncst1) == ncst1
10045 && multiple_of_p (type, arg0,
10046 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10047 return fold_convert_loc (loc, type, arg0);
10048 }
10049
10050 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10051 bits from CST2. */
10052 if (TREE_CODE (arg1) == INTEGER_CST
10053 && TREE_CODE (arg0) == MULT_EXPR
10054 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10055 {
10056 wide_int warg1 = arg1;
10057 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10058
10059 if (masked == 0)
10060 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10061 arg0, arg1);
10062 else if (masked != warg1)
10063 {
10064 /* Avoid the transform if arg1 is a mask of some
10065 mode which allows further optimizations. */
10066 int pop = wi::popcount (warg1);
10067 if (!(pop >= BITS_PER_UNIT
10068 && exact_log2 (pop) != -1
10069 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10070 return fold_build2_loc (loc, code, type, op0,
10071 wide_int_to_tree (type, masked));
10072 }
10073 }
10074
10075 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10076 ((A & N) + B) & M -> (A + B) & M
10077 Similarly if (N & M) == 0,
10078 ((A | N) + B) & M -> (A + B) & M
10079 and for - instead of + (or unary - instead of +)
10080 and/or ^ instead of |.
10081 If B is constant and (B & M) == 0, fold into A & M. */
10082 if (TREE_CODE (arg1) == INTEGER_CST)
10083 {
10084 wide_int cst1 = arg1;
10085 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10086 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10087 && (TREE_CODE (arg0) == PLUS_EXPR
10088 || TREE_CODE (arg0) == MINUS_EXPR
10089 || TREE_CODE (arg0) == NEGATE_EXPR)
10090 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10091 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10092 {
10093 tree pmop[2];
10094 int which = 0;
10095 wide_int cst0;
10096
10097 /* Now we know that arg0 is (C + D) or (C - D) or
10098 -C and arg1 (M) is == (1LL << cst) - 1.
10099 Store C into PMOP[0] and D into PMOP[1]. */
10100 pmop[0] = TREE_OPERAND (arg0, 0);
10101 pmop[1] = NULL;
10102 if (TREE_CODE (arg0) != NEGATE_EXPR)
10103 {
10104 pmop[1] = TREE_OPERAND (arg0, 1);
10105 which = 1;
10106 }
10107
10108 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10109 which = -1;
10110
10111 for (; which >= 0; which--)
10112 switch (TREE_CODE (pmop[which]))
10113 {
10114 case BIT_AND_EXPR:
10115 case BIT_IOR_EXPR:
10116 case BIT_XOR_EXPR:
10117 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10118 != INTEGER_CST)
10119 break;
10120 cst0 = TREE_OPERAND (pmop[which], 1);
10121 cst0 &= cst1;
10122 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10123 {
10124 if (cst0 != cst1)
10125 break;
10126 }
10127 else if (cst0 != 0)
10128 break;
10129 /* If C or D is of the form (A & N) where
10130 (N & M) == M, or of the form (A | N) or
10131 (A ^ N) where (N & M) == 0, replace it with A. */
10132 pmop[which] = TREE_OPERAND (pmop[which], 0);
10133 break;
10134 case INTEGER_CST:
10135 /* If C or D is a N where (N & M) == 0, it can be
10136 omitted (assumed 0). */
10137 if ((TREE_CODE (arg0) == PLUS_EXPR
10138 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10139 && (cst1 & pmop[which]) == 0)
10140 pmop[which] = NULL;
10141 break;
10142 default:
10143 break;
10144 }
10145
10146 /* Only build anything new if we optimized one or both arguments
10147 above. */
10148 if (pmop[0] != TREE_OPERAND (arg0, 0)
10149 || (TREE_CODE (arg0) != NEGATE_EXPR
10150 && pmop[1] != TREE_OPERAND (arg0, 1)))
10151 {
10152 tree utype = TREE_TYPE (arg0);
10153 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10154 {
10155 /* Perform the operations in a type that has defined
10156 overflow behavior. */
10157 utype = unsigned_type_for (TREE_TYPE (arg0));
10158 if (pmop[0] != NULL)
10159 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10160 if (pmop[1] != NULL)
10161 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10162 }
10163
10164 if (TREE_CODE (arg0) == NEGATE_EXPR)
10165 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10166 else if (TREE_CODE (arg0) == PLUS_EXPR)
10167 {
10168 if (pmop[0] != NULL && pmop[1] != NULL)
10169 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10170 pmop[0], pmop[1]);
10171 else if (pmop[0] != NULL)
10172 tem = pmop[0];
10173 else if (pmop[1] != NULL)
10174 tem = pmop[1];
10175 else
10176 return build_int_cst (type, 0);
10177 }
10178 else if (pmop[0] == NULL)
10179 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10180 else
10181 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10182 pmop[0], pmop[1]);
10183 /* TEM is now the new binary +, - or unary - replacement. */
10184 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10185 fold_convert_loc (loc, utype, arg1));
10186 return fold_convert_loc (loc, type, tem);
10187 }
10188 }
10189 }
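/* Editorial sketch, not from the original source: with M = 0xff the
   simplification above gives

     ((a & 0xff) + b) & 0xff    ->  (a + b) & 0xff    (N & M) == M
     ((a | 0x100) + b) & 0xff   ->  (a + b) & 0xff    (N & M) == 0

   since bits of A above the mask cannot affect the masked low bits
   of the sum.  */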
10190
10191 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10192 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10193 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10194 {
10195 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10196
10197 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10198 if (mask == -1)
10199 return
10200 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10201 }
10202
10203 goto associate;
10204
10205 case RDIV_EXPR:
10206 /* Don't touch a floating-point divide by zero unless the mode
10207 of the constant can represent infinity. */
10208 if (TREE_CODE (arg1) == REAL_CST
10209 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10210 && real_zerop (arg1))
10211 return NULL_TREE;
10212
10213 /* (-A) / (-B) -> A / B */
10214 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10215 return fold_build2_loc (loc, RDIV_EXPR, type,
10216 TREE_OPERAND (arg0, 0),
10217 negate_expr (arg1));
10218 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10219 return fold_build2_loc (loc, RDIV_EXPR, type,
10220 negate_expr (arg0),
10221 TREE_OPERAND (arg1, 0));
10222
10223 /* Convert A/B/C to A/(B*C). */
10224 if (flag_reciprocal_math
10225 && TREE_CODE (arg0) == RDIV_EXPR)
10226 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10227 fold_build2_loc (loc, MULT_EXPR, type,
10228 TREE_OPERAND (arg0, 1), arg1));
10229
10230 /* Convert A/(B/C) to (A/B)*C. */
10231 if (flag_reciprocal_math
10232 && TREE_CODE (arg1) == RDIV_EXPR)
10233 return fold_build2_loc (loc, MULT_EXPR, type,
10234 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
10235 TREE_OPERAND (arg1, 0)),
10236 TREE_OPERAND (arg1, 1));
10237
10238 /* Convert C1/(X*C2) into (C1/C2)/X. */
10239 if (flag_reciprocal_math
10240 && TREE_CODE (arg1) == MULT_EXPR
10241 && TREE_CODE (arg0) == REAL_CST
10242 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10243 {
10244 tree tem = const_binop (RDIV_EXPR, arg0,
10245 TREE_OPERAND (arg1, 1));
10246 if (tem)
10247 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10248 TREE_OPERAND (arg1, 0));
10249 }
10250
10251 return NULL_TREE;
10252
10253 case TRUNC_DIV_EXPR:
10254 /* Optimize (X & (-A)) / A where A is a power of 2,
10255 to X >> log2(A) */
10256 if (TREE_CODE (arg0) == BIT_AND_EXPR
10257 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
10258 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
10259 {
10260 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
10261 arg1, TREE_OPERAND (arg0, 1));
10262 if (sum && integer_zerop (sum))
10263 {
10264 tree pow2 = build_int_cst (integer_type_node, wi::exact_log2 (arg1));
10265 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10266 TREE_OPERAND (arg0, 0), pow2);
10267 }
10268 }
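/* Editorial sketch, not from the original source: with A = 16 the
   transform above rewrites (x & -16) / 16 as x >> 4 for signed x;
   the AND makes the division exact, so the arithmetic shift is
   equivalent even for negative x.  */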
10269
10270 /* Fall through */
10271
10272 case FLOOR_DIV_EXPR:
10273 /* Simplify A / (B << N) where A and B are positive and B is
10274 a power of 2, to A >> (N + log2(B)). */
10275 strict_overflow_p = false;
10276 if (TREE_CODE (arg1) == LSHIFT_EXPR
10277 && (TYPE_UNSIGNED (type)
10278 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10279 {
10280 tree sval = TREE_OPERAND (arg1, 0);
10281 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10282 {
10283 tree sh_cnt = TREE_OPERAND (arg1, 1);
10284 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10285 wi::exact_log2 (sval));
10286
10287 if (strict_overflow_p)
10288 fold_overflow_warning (("assuming signed overflow does not "
10289 "occur when simplifying A / (B << N)"),
10290 WARN_STRICT_OVERFLOW_MISC);
10291
10292 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10293 sh_cnt, pow2);
10294 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10295 fold_convert_loc (loc, type, arg0), sh_cnt);
10296 }
10297 }
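/* Editorial sketch, not from the original source: with B = 8 the
   simplification above turns a / (8u << n) into a >> (n + 3) for
   unsigned a, because 8 << n equals 1 << (n + 3).  */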
10298
10299 /* Fall through */
10300
10301 case ROUND_DIV_EXPR:
10302 case CEIL_DIV_EXPR:
10303 case EXACT_DIV_EXPR:
10304 if (integer_zerop (arg1))
10305 return NULL_TREE;
10306
10307 /* Convert -A / -B to A / B when the type is signed and overflow is
10308 undefined. */
10309 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10310 && TREE_CODE (arg0) == NEGATE_EXPR
10311 && negate_expr_p (arg1))
10312 {
10313 if (INTEGRAL_TYPE_P (type))
10314 fold_overflow_warning (("assuming signed overflow does not occur "
10315 "when distributing negation across "
10316 "division"),
10317 WARN_STRICT_OVERFLOW_MISC);
10318 return fold_build2_loc (loc, code, type,
10319 fold_convert_loc (loc, type,
10320 TREE_OPERAND (arg0, 0)),
10321 fold_convert_loc (loc, type,
10322 negate_expr (arg1)));
10323 }
10324 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10325 && TREE_CODE (arg1) == NEGATE_EXPR
10326 && negate_expr_p (arg0))
10327 {
10328 if (INTEGRAL_TYPE_P (type))
10329 fold_overflow_warning (("assuming signed overflow does not occur "
10330 "when distributing negation across "
10331 "division"),
10332 WARN_STRICT_OVERFLOW_MISC);
10333 return fold_build2_loc (loc, code, type,
10334 fold_convert_loc (loc, type,
10335 negate_expr (arg0)),
10336 fold_convert_loc (loc, type,
10337 TREE_OPERAND (arg1, 0)));
10338 }
10339
10340 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10341 operation, EXACT_DIV_EXPR.
10342
10343 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10344 	 At one time others generated faster code, but it's not clear whether
10345 	 they still do after the last round of changes to the DIV code in expmed.c. */
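      /* For example (illustrative, editor's sketch): a CEIL_DIV_EXPR of
	 x * 8 by 4 is statically known to be exact, so it may become
	 EXACT_DIV_EXPR <x * 8, 4>.  */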
10346 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10347 && multiple_of_p (type, arg0, arg1))
10348 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10349 fold_convert (type, arg0),
10350 fold_convert (type, arg1));
10351
10352 strict_overflow_p = false;
10353 if (TREE_CODE (arg1) == INTEGER_CST
10354 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10355 &strict_overflow_p)))
10356 {
10357 if (strict_overflow_p)
10358 fold_overflow_warning (("assuming signed overflow does not occur "
10359 "when simplifying division"),
10360 WARN_STRICT_OVERFLOW_MISC);
10361 return fold_convert_loc (loc, type, tem);
10362 }
10363
10364 return NULL_TREE;
10365
10366 case CEIL_MOD_EXPR:
10367 case FLOOR_MOD_EXPR:
10368 case ROUND_MOD_EXPR:
10369 case TRUNC_MOD_EXPR:
10370 strict_overflow_p = false;
10371 if (TREE_CODE (arg1) == INTEGER_CST
10372 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10373 &strict_overflow_p)))
10374 {
10375 if (strict_overflow_p)
10376 fold_overflow_warning (("assuming signed overflow does not occur "
10377 "when simplifying modulus"),
10378 WARN_STRICT_OVERFLOW_MISC);
10379 return fold_convert_loc (loc, type, tem);
10380 }
10381
10382 return NULL_TREE;
10383
10384 case LROTATE_EXPR:
10385 case RROTATE_EXPR:
10386 case RSHIFT_EXPR:
10387 case LSHIFT_EXPR:
10388 /* Since negative shift count is not well-defined,
10389 don't try to compute it in the compiler. */
10390 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10391 return NULL_TREE;
10392
10393 prec = element_precision (type);
10394
10395 /* If we have a rotate of a bit operation with the rotate count and
10396 the second operand of the bit operation both constant,
10397 permute the two operations. */
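      /* For example (illustrative, editor's sketch): for an unsigned
	 32-bit x, (x & 0xFF) r>> 4 becomes (x r>> 4) & (0xFF r>> 4),
	 i.e. (x r>> 4) & 0xF000000F.  */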
10398 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10399 && (TREE_CODE (arg0) == BIT_AND_EXPR
10400 || TREE_CODE (arg0) == BIT_IOR_EXPR
10401 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10402 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10403 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10404 fold_build2_loc (loc, code, type,
10405 TREE_OPERAND (arg0, 0), arg1),
10406 fold_build2_loc (loc, code, type,
10407 TREE_OPERAND (arg0, 1), arg1));
10408
10409 	/* Two consecutive rotates adding up to some integer
10410 	   multiple of the precision of the type can be ignored. */
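      /* For example (illustrative, editor's sketch): for an unsigned
	 32-bit x, (x r>> 5) r>> 27 rotates by 32 bits in total and folds
	 back to x.  */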
10411 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10412 && TREE_CODE (arg0) == RROTATE_EXPR
10413 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10414 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10415 prec) == 0)
10416 return TREE_OPERAND (arg0, 0);
10417
10418 return NULL_TREE;
10419
10420 case MIN_EXPR:
10421 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
10422 if (tem)
10423 return tem;
10424 goto associate;
10425
10426 case MAX_EXPR:
10427 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
10428 if (tem)
10429 return tem;
10430 goto associate;
10431
10432 case TRUTH_ANDIF_EXPR:
10433 /* Note that the operands of this must be ints
10434 and their values must be 0 or 1.
10435 ("true" is a fixed value perhaps depending on the language.) */
10436 /* If first arg is constant zero, return it. */
10437 if (integer_zerop (arg0))
10438 return fold_convert_loc (loc, type, arg0);
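      /* Fall through */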
10439 case TRUTH_AND_EXPR:
10440 /* If either arg is constant true, drop it. */
10441 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10442 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10443 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10444 /* Preserve sequence points. */
10445 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10446 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10447 /* If second arg is constant zero, result is zero, but first arg
10448 must be evaluated. */
10449 if (integer_zerop (arg1))
10450 return omit_one_operand_loc (loc, type, arg1, arg0);
10451 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10452 case will be handled here. */
10453 if (integer_zerop (arg0))
10454 return omit_one_operand_loc (loc, type, arg0, arg1);
10455
10456 /* !X && X is always false. */
10457 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10458 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10459 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10460 /* X && !X is always false. */
10461 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10462 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10463 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10464
10465 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10466 means A >= Y && A != MAX, but in this case we know that
10467 A < X <= MAX. */
10468
10469 if (!TREE_SIDE_EFFECTS (arg0)
10470 && !TREE_SIDE_EFFECTS (arg1))
10471 {
10472 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10473 if (tem && !operand_equal_p (tem, arg0, 0))
10474 return fold_build2_loc (loc, code, type, tem, arg1);
10475
10476 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10477 if (tem && !operand_equal_p (tem, arg1, 0))
10478 return fold_build2_loc (loc, code, type, arg0, tem);
10479 }
10480
10481 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10482 != NULL_TREE)
10483 return tem;
10484
10485 return NULL_TREE;
10486
10487 case TRUTH_ORIF_EXPR:
10488 /* Note that the operands of this must be ints
10489 and their values must be 0 or true.
10490 ("true" is a fixed value perhaps depending on the language.) */
10491 /* If first arg is constant true, return it. */
10492 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10493 return fold_convert_loc (loc, type, arg0);
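      /* Fall through */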
10494 case TRUTH_OR_EXPR:
10495 /* If either arg is constant zero, drop it. */
10496 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10497 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10498 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10499 /* Preserve sequence points. */
10500 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10501 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10502 /* If second arg is constant true, result is true, but we must
10503 evaluate first arg. */
10504 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10505 return omit_one_operand_loc (loc, type, arg1, arg0);
10506 /* Likewise for first arg, but note this only occurs here for
10507 TRUTH_OR_EXPR. */
10508 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10509 return omit_one_operand_loc (loc, type, arg0, arg1);
10510
10511 /* !X || X is always true. */
10512 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10513 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10514 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10515 /* X || !X is always true. */
10516 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10517 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10518 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10519
10520 /* (X && !Y) || (!X && Y) is X ^ Y */
10521 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10522 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10523 {
10524 tree a0, a1, l0, l1, n0, n1;
10525
10526 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10527 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10528
10529 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10530 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10531
10532 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10533 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10534
10535 if ((operand_equal_p (n0, a0, 0)
10536 && operand_equal_p (n1, a1, 0))
10537 || (operand_equal_p (n0, a1, 0)
10538 && operand_equal_p (n1, a0, 0)))
10539 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10540 }
10541
10542 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10543 != NULL_TREE)
10544 return tem;
10545
10546 return NULL_TREE;
10547
10548 case TRUTH_XOR_EXPR:
10549 /* If the second arg is constant zero, drop it. */
10550 if (integer_zerop (arg1))
10551 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10552 /* If the second arg is constant true, this is a logical inversion. */
10553 if (integer_onep (arg1))
10554 {
10555 tem = invert_truthvalue_loc (loc, arg0);
10556 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10557 }
10558 /* Identical arguments cancel to zero. */
10559 if (operand_equal_p (arg0, arg1, 0))
10560 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10561
10562 /* !X ^ X is always true. */
10563 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10564 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10565 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10566
10567 /* X ^ !X is always true. */
10568 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10569 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10570 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10571
10572 return NULL_TREE;
10573
10574 case EQ_EXPR:
10575 case NE_EXPR:
10576 STRIP_NOPS (arg0);
10577 STRIP_NOPS (arg1);
10578
10579 tem = fold_comparison (loc, code, type, op0, op1);
10580 if (tem != NULL_TREE)
10581 return tem;
10582
10583 /* bool_var != 1 becomes !bool_var. */
10584 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10585 && code == NE_EXPR)
10586 return fold_convert_loc (loc, type,
10587 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10588 TREE_TYPE (arg0), arg0));
10589
10590 /* bool_var == 0 becomes !bool_var. */
10591 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10592 && code == EQ_EXPR)
10593 return fold_convert_loc (loc, type,
10594 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10595 TREE_TYPE (arg0), arg0));
10596
10597 /* !exp != 0 becomes !exp */
10598 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10599 && code == NE_EXPR)
10600 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10601
10602 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10603 if ((TREE_CODE (arg0) == PLUS_EXPR
10604 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10605 || TREE_CODE (arg0) == MINUS_EXPR)
10606 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10607 0)),
10608 arg1, 0)
10609 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10610 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10611 {
10612 tree val = TREE_OPERAND (arg0, 1);
10613 return omit_two_operands_loc (loc, type,
10614 fold_build2_loc (loc, code, type,
10615 val,
10616 build_int_cst (TREE_TYPE (val),
10617 0)),
10618 TREE_OPERAND (arg0, 0), arg1);
10619 }
10620
10621 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10622 if (TREE_CODE (arg0) == MINUS_EXPR
10623 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10624 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10625 1)),
10626 arg1, 0)
10627 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10628 {
10629 return omit_two_operands_loc (loc, type,
10630 code == NE_EXPR
10631 ? boolean_true_node : boolean_false_node,
10632 TREE_OPERAND (arg0, 1), arg1);
10633 }
10634
10635 /* If this is an EQ or NE comparison with zero and ARG0 is
10636 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10637 two operations, but the latter can be done in one less insn
10638 on machines that have only two-operand insns or on which a
10639 constant cannot be the first operand. */
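      /* For example (illustrative, editor's sketch): ((1 << n) & flags) == 0
	 becomes ((flags >> n) & 1) == 0, which avoids a constant first
	 operand to the shift.  */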
10640 if (TREE_CODE (arg0) == BIT_AND_EXPR
10641 && integer_zerop (arg1))
10642 {
10643 tree arg00 = TREE_OPERAND (arg0, 0);
10644 tree arg01 = TREE_OPERAND (arg0, 1);
10645 if (TREE_CODE (arg00) == LSHIFT_EXPR
10646 && integer_onep (TREE_OPERAND (arg00, 0)))
10647 {
10648 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10649 arg01, TREE_OPERAND (arg00, 1));
10650 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10651 build_int_cst (TREE_TYPE (arg0), 1));
10652 return fold_build2_loc (loc, code, type,
10653 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10654 arg1);
10655 }
10656 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10657 && integer_onep (TREE_OPERAND (arg01, 0)))
10658 {
10659 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10660 arg00, TREE_OPERAND (arg01, 1));
10661 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10662 build_int_cst (TREE_TYPE (arg0), 1));
10663 return fold_build2_loc (loc, code, type,
10664 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10665 arg1);
10666 }
10667 }
10668
10669 /* If this is an NE or EQ comparison of zero against the result of a
10670 signed MOD operation whose second operand is a power of 2, make
10671 the MOD operation unsigned since it is simpler and equivalent. */
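      /* For example (illustrative, editor's sketch): for a signed x,
	 x % 16 == 0 becomes (unsigned) x % 16 == 0.  */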
10672 if (integer_zerop (arg1)
10673 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10674 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10675 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10676 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10677 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10678 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10679 {
10680 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10681 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10682 fold_convert_loc (loc, newtype,
10683 TREE_OPERAND (arg0, 0)),
10684 fold_convert_loc (loc, newtype,
10685 TREE_OPERAND (arg0, 1)));
10686
10687 return fold_build2_loc (loc, code, type, newmod,
10688 fold_convert_loc (loc, newtype, arg1));
10689 }
10690
10691 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10692 C1 is a valid shift constant, and C2 is a power of two, i.e.
10693 a single bit. */
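      /* For example (illustrative, editor's sketch): ((x >> 3) & 4) != 0
	 becomes (x & (4 << 3)) != 0, i.e. (x & 32) != 0, since the
	 shifted mask does not overflow here.  */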
10694 if (TREE_CODE (arg0) == BIT_AND_EXPR
10695 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10696 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10697 == INTEGER_CST
10698 && integer_pow2p (TREE_OPERAND (arg0, 1))
10699 && integer_zerop (arg1))
10700 {
10701 tree itype = TREE_TYPE (arg0);
10702 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10703 prec = TYPE_PRECISION (itype);
10704
10705 /* Check for a valid shift count. */
10706 if (wi::ltu_p (arg001, prec))
10707 {
10708 tree arg01 = TREE_OPERAND (arg0, 1);
10709 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10710 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10711 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10712 can be rewritten as (X & (C2 << C1)) != 0. */
10713 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10714 {
10715 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10716 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10717 return fold_build2_loc (loc, code, type, tem,
10718 fold_convert_loc (loc, itype, arg1));
10719 }
10720 /* Otherwise, for signed (arithmetic) shifts,
10721 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10722 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10723 else if (!TYPE_UNSIGNED (itype))
10724 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10725 arg000, build_int_cst (itype, 0));
10726 	      /* Otherwise, for unsigned (logical) shifts,
10727 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10728 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10729 else
10730 return omit_one_operand_loc (loc, type,
10731 code == EQ_EXPR ? integer_one_node
10732 : integer_zero_node,
10733 arg000);
10734 }
10735 }
10736
10737 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10738 Similarly for NE_EXPR. */
10739 if (TREE_CODE (arg0) == BIT_AND_EXPR
10740 && TREE_CODE (arg1) == INTEGER_CST
10741 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10742 {
10743 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10744 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10745 TREE_OPERAND (arg0, 1));
10746 tree dandnotc
10747 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10748 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10749 notc);
10750 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10751 if (integer_nonzerop (dandnotc))
10752 return omit_one_operand_loc (loc, type, rslt, arg0);
10753 }
10754
10755 /* If this is a comparison of a field, we may be able to simplify it. */
10756 if ((TREE_CODE (arg0) == COMPONENT_REF
10757 || TREE_CODE (arg0) == BIT_FIELD_REF)
10758 /* Handle the constant case even without -O
10759 to make sure the warnings are given. */
10760 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10761 {
10762 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10763 if (t1)
10764 return t1;
10765 }
10766
10767 /* Optimize comparisons of strlen vs zero to a compare of the
10768 first character of the string vs zero. To wit,
10769 strlen(ptr) == 0 => *ptr == 0
10770 strlen(ptr) != 0 => *ptr != 0
10771 Other cases should reduce to one of these two (or a constant)
10772 due to the return value of strlen being unsigned. */
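      /* For example (illustrative, editor's sketch): strlen (p) == 0
	 becomes *p == 0, avoiding the library call.  */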
10773 if (TREE_CODE (arg0) == CALL_EXPR
10774 && integer_zerop (arg1))
10775 {
10776 tree fndecl = get_callee_fndecl (arg0);
10777
10778 if (fndecl
10779 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10780 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10781 && call_expr_nargs (arg0) == 1
10782 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10783 {
10784 tree iref = build_fold_indirect_ref_loc (loc,
10785 CALL_EXPR_ARG (arg0, 0));
10786 return fold_build2_loc (loc, code, type, iref,
10787 build_int_cst (TREE_TYPE (iref), 0));
10788 }
10789 }
10790
10791 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10792 of X. Similarly fold (X >> C) == 0 into X >= 0. */
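      /* For example (illustrative, editor's sketch): for a signed 32-bit
	 x, (x >> 31) != 0 becomes x < 0 and (x >> 31) == 0 becomes
	 x >= 0.  */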
10793 if (TREE_CODE (arg0) == RSHIFT_EXPR
10794 && integer_zerop (arg1)
10795 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10796 {
10797 tree arg00 = TREE_OPERAND (arg0, 0);
10798 tree arg01 = TREE_OPERAND (arg0, 1);
10799 tree itype = TREE_TYPE (arg00);
10800 if (wi::eq_p (arg01, element_precision (itype) - 1))
10801 {
10802 if (TYPE_UNSIGNED (itype))
10803 {
10804 itype = signed_type_for (itype);
10805 arg00 = fold_convert_loc (loc, itype, arg00);
10806 }
10807 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10808 type, arg00, build_zero_cst (itype));
10809 }
10810 }
10811
10812 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10813 (X & C) == 0 when C is a single bit. */
10814 if (TREE_CODE (arg0) == BIT_AND_EXPR
10815 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10816 && integer_zerop (arg1)
10817 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10818 {
10819 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10820 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10821 TREE_OPERAND (arg0, 1));
10822 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10823 type, tem,
10824 fold_convert_loc (loc, TREE_TYPE (arg0),
10825 arg1));
10826 }
10827
10828 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10829 constant C is a power of two, i.e. a single bit. */
10830 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10831 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10832 && integer_zerop (arg1)
10833 && integer_pow2p (TREE_OPERAND (arg0, 1))
10834 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10835 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10836 {
10837 tree arg00 = TREE_OPERAND (arg0, 0);
10838 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10839 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10840 }
10841
10842 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10843 	 when C is a power of two, i.e. a single bit. */
10844 if (TREE_CODE (arg0) == BIT_AND_EXPR
10845 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10846 && integer_zerop (arg1)
10847 && integer_pow2p (TREE_OPERAND (arg0, 1))
10848 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10849 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10850 {
10851 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10852 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10853 arg000, TREE_OPERAND (arg0, 1));
10854 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10855 tem, build_int_cst (TREE_TYPE (tem), 0));
10856 }
10857
10858 if (integer_zerop (arg1)
10859 && tree_expr_nonzero_p (arg0))
10860 {
10861 	  tree res = constant_boolean_node (code == NE_EXPR, type);
10862 return omit_one_operand_loc (loc, type, res, arg0);
10863 }
10864
10865 	/* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10866 if (TREE_CODE (arg0) == BIT_AND_EXPR
10867 && TREE_CODE (arg1) == BIT_AND_EXPR)
10868 {
10869 tree arg00 = TREE_OPERAND (arg0, 0);
10870 tree arg01 = TREE_OPERAND (arg0, 1);
10871 tree arg10 = TREE_OPERAND (arg1, 0);
10872 tree arg11 = TREE_OPERAND (arg1, 1);
10873 tree itype = TREE_TYPE (arg0);
10874
10875 if (operand_equal_p (arg01, arg11, 0))
10876 return fold_build2_loc (loc, code, type,
10877 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10878 fold_build2_loc (loc,
10879 BIT_XOR_EXPR, itype,
10880 arg00, arg10),
10881 arg01),
10882 build_zero_cst (itype));
10883
10884 if (operand_equal_p (arg01, arg10, 0))
10885 return fold_build2_loc (loc, code, type,
10886 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10887 fold_build2_loc (loc,
10888 BIT_XOR_EXPR, itype,
10889 arg00, arg11),
10890 arg01),
10891 build_zero_cst (itype));
10892
10893 if (operand_equal_p (arg00, arg11, 0))
10894 return fold_build2_loc (loc, code, type,
10895 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10896 fold_build2_loc (loc,
10897 BIT_XOR_EXPR, itype,
10898 arg01, arg10),
10899 arg00),
10900 build_zero_cst (itype));
10901
10902 if (operand_equal_p (arg00, arg10, 0))
10903 return fold_build2_loc (loc, code, type,
10904 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10905 fold_build2_loc (loc,
10906 BIT_XOR_EXPR, itype,
10907 arg01, arg11),
10908 arg00),
10909 build_zero_cst (itype));
10910 }
10911
10912 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10913 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10914 {
10915 tree arg00 = TREE_OPERAND (arg0, 0);
10916 tree arg01 = TREE_OPERAND (arg0, 1);
10917 tree arg10 = TREE_OPERAND (arg1, 0);
10918 tree arg11 = TREE_OPERAND (arg1, 1);
10919 tree itype = TREE_TYPE (arg0);
10920
10921 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10922 operand_equal_p guarantees no side-effects so we don't need
10923 to use omit_one_operand on Z. */
10924 if (operand_equal_p (arg01, arg11, 0))
10925 return fold_build2_loc (loc, code, type, arg00,
10926 fold_convert_loc (loc, TREE_TYPE (arg00),
10927 arg10));
10928 if (operand_equal_p (arg01, arg10, 0))
10929 return fold_build2_loc (loc, code, type, arg00,
10930 fold_convert_loc (loc, TREE_TYPE (arg00),
10931 arg11));
10932 if (operand_equal_p (arg00, arg11, 0))
10933 return fold_build2_loc (loc, code, type, arg01,
10934 fold_convert_loc (loc, TREE_TYPE (arg01),
10935 arg10));
10936 if (operand_equal_p (arg00, arg10, 0))
10937 return fold_build2_loc (loc, code, type, arg01,
10938 fold_convert_loc (loc, TREE_TYPE (arg01),
10939 arg11));
10940
10941 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10942 if (TREE_CODE (arg01) == INTEGER_CST
10943 && TREE_CODE (arg11) == INTEGER_CST)
10944 {
10945 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10946 fold_convert_loc (loc, itype, arg11));
10947 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10948 return fold_build2_loc (loc, code, type, tem,
10949 fold_convert_loc (loc, itype, arg10));
10950 }
10951 }
10952
10953 /* Attempt to simplify equality/inequality comparisons of complex
10954 values. Only lower the comparison if the result is known or
10955 can be simplified to a single scalar comparison. */
10956 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10957 || TREE_CODE (arg0) == COMPLEX_CST)
10958 && (TREE_CODE (arg1) == COMPLEX_EXPR
10959 || TREE_CODE (arg1) == COMPLEX_CST))
10960 {
10961 tree real0, imag0, real1, imag1;
10962 tree rcond, icond;
10963
10964 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10965 {
10966 real0 = TREE_OPERAND (arg0, 0);
10967 imag0 = TREE_OPERAND (arg0, 1);
10968 }
10969 else
10970 {
10971 real0 = TREE_REALPART (arg0);
10972 imag0 = TREE_IMAGPART (arg0);
10973 }
10974
10975 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10976 {
10977 real1 = TREE_OPERAND (arg1, 0);
10978 imag1 = TREE_OPERAND (arg1, 1);
10979 }
10980 else
10981 {
10982 real1 = TREE_REALPART (arg1);
10983 imag1 = TREE_IMAGPART (arg1);
10984 }
10985
10986 rcond = fold_binary_loc (loc, code, type, real0, real1);
10987 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10988 {
10989 if (integer_zerop (rcond))
10990 {
10991 if (code == EQ_EXPR)
10992 return omit_two_operands_loc (loc, type, boolean_false_node,
10993 imag0, imag1);
10994 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10995 }
10996 else
10997 {
10998 if (code == NE_EXPR)
10999 return omit_two_operands_loc (loc, type, boolean_true_node,
11000 imag0, imag1);
11001 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11002 }
11003 }
11004
11005 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11006 if (icond && TREE_CODE (icond) == INTEGER_CST)
11007 {
11008 if (integer_zerop (icond))
11009 {
11010 if (code == EQ_EXPR)
11011 return omit_two_operands_loc (loc, type, boolean_false_node,
11012 real0, real1);
11013 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11014 }
11015 else
11016 {
11017 if (code == NE_EXPR)
11018 return omit_two_operands_loc (loc, type, boolean_true_node,
11019 real0, real1);
11020 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11021 }
11022 }
11023 }
11024
11025 return NULL_TREE;
11026
11027 case LT_EXPR:
11028 case GT_EXPR:
11029 case LE_EXPR:
11030 case GE_EXPR:
11031 tem = fold_comparison (loc, code, type, op0, op1);
11032 if (tem != NULL_TREE)
11033 return tem;
11034
11035 /* Transform comparisons of the form X +- C CMP X. */
11036 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11037 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11038 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11039 && !HONOR_SNANS (arg0))
11040 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11041 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11042 {
11043 tree arg01 = TREE_OPERAND (arg0, 1);
11044 enum tree_code code0 = TREE_CODE (arg0);
11045 int is_positive;
11046
11047 if (TREE_CODE (arg01) == REAL_CST)
11048 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11049 else
11050 is_positive = tree_int_cst_sgn (arg01);
11051
11052 /* (X - c) > X becomes false. */
11053 if (code == GT_EXPR
11054 && ((code0 == MINUS_EXPR && is_positive >= 0)
11055 || (code0 == PLUS_EXPR && is_positive <= 0)))
11056 {
11057 if (TREE_CODE (arg01) == INTEGER_CST
11058 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11059 fold_overflow_warning (("assuming signed overflow does not "
11060 "occur when assuming that (X - c) > X "
11061 "is always false"),
11062 WARN_STRICT_OVERFLOW_ALL);
11063 return constant_boolean_node (0, type);
11064 }
11065
11066 /* Likewise (X + c) < X becomes false. */
11067 if (code == LT_EXPR
11068 && ((code0 == PLUS_EXPR && is_positive >= 0)
11069 || (code0 == MINUS_EXPR && is_positive <= 0)))
11070 {
11071 if (TREE_CODE (arg01) == INTEGER_CST
11072 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11073 fold_overflow_warning (("assuming signed overflow does not "
11074 "occur when assuming that "
11075 "(X + c) < X is always false"),
11076 WARN_STRICT_OVERFLOW_ALL);
11077 return constant_boolean_node (0, type);
11078 }
11079
11080 /* Convert (X - c) <= X to true. */
11081 if (!HONOR_NANS (arg1)
11082 && code == LE_EXPR
11083 && ((code0 == MINUS_EXPR && is_positive >= 0)
11084 || (code0 == PLUS_EXPR && is_positive <= 0)))
11085 {
11086 if (TREE_CODE (arg01) == INTEGER_CST
11087 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11088 fold_overflow_warning (("assuming signed overflow does not "
11089 "occur when assuming that "
11090 "(X - c) <= X is always true"),
11091 WARN_STRICT_OVERFLOW_ALL);
11092 return constant_boolean_node (1, type);
11093 }
11094
11095 /* Convert (X + c) >= X to true. */
11096 if (!HONOR_NANS (arg1)
11097 && code == GE_EXPR
11098 && ((code0 == PLUS_EXPR && is_positive >= 0)
11099 || (code0 == MINUS_EXPR && is_positive <= 0)))
11100 {
11101 if (TREE_CODE (arg01) == INTEGER_CST
11102 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11103 fold_overflow_warning (("assuming signed overflow does not "
11104 "occur when assuming that "
11105 "(X + c) >= X is always true"),
11106 WARN_STRICT_OVERFLOW_ALL);
11107 return constant_boolean_node (1, type);
11108 }
11109
11110 if (TREE_CODE (arg01) == INTEGER_CST)
11111 {
11112 /* Convert X + c > X and X - c < X to true for integers. */
11113 if (code == GT_EXPR
11114 && ((code0 == PLUS_EXPR && is_positive > 0)
11115 || (code0 == MINUS_EXPR && is_positive < 0)))
11116 {
11117 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11118 fold_overflow_warning (("assuming signed overflow does "
11119 "not occur when assuming that "
11120 "(X + c) > X is always true"),
11121 WARN_STRICT_OVERFLOW_ALL);
11122 return constant_boolean_node (1, type);
11123 }
11124
11125 if (code == LT_EXPR
11126 && ((code0 == MINUS_EXPR && is_positive > 0)
11127 || (code0 == PLUS_EXPR && is_positive < 0)))
11128 {
11129 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11130 fold_overflow_warning (("assuming signed overflow does "
11131 "not occur when assuming that "
11132 "(X - c) < X is always true"),
11133 WARN_STRICT_OVERFLOW_ALL);
11134 return constant_boolean_node (1, type);
11135 }
11136
11137 /* Convert X + c <= X and X - c >= X to false for integers. */
11138 if (code == LE_EXPR
11139 && ((code0 == PLUS_EXPR && is_positive > 0)
11140 || (code0 == MINUS_EXPR && is_positive < 0)))
11141 {
11142 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11143 fold_overflow_warning (("assuming signed overflow does "
11144 "not occur when assuming that "
11145 "(X + c) <= X is always false"),
11146 WARN_STRICT_OVERFLOW_ALL);
11147 return constant_boolean_node (0, type);
11148 }
11149
11150 if (code == GE_EXPR
11151 && ((code0 == MINUS_EXPR && is_positive > 0)
11152 || (code0 == PLUS_EXPR && is_positive < 0)))
11153 {
11154 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11155 fold_overflow_warning (("assuming signed overflow does "
11156 "not occur when assuming that "
11157 "(X - c) >= X is always false"),
11158 WARN_STRICT_OVERFLOW_ALL);
11159 return constant_boolean_node (0, type);
11160 }
11161 }
11162 }
11163
11164 /* If we are comparing an ABS_EXPR with a constant, we can
11165 convert all the cases into explicit comparisons, but they may
11166 well not be faster than doing the ABS and one comparison.
11167 But ABS (X) <= C is a range comparison, which becomes a subtraction
11168 and a comparison, and is probably faster. */
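      /* For example (illustrative, editor's sketch): abs (x) <= 5 becomes
	 x >= -5 && x <= 5.  */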
11169 if (code == LE_EXPR
11170 && TREE_CODE (arg1) == INTEGER_CST
11171 && TREE_CODE (arg0) == ABS_EXPR
11172 && ! TREE_SIDE_EFFECTS (arg0)
11173 && (0 != (tem = negate_expr (arg1)))
11174 && TREE_CODE (tem) == INTEGER_CST
11175 && !TREE_OVERFLOW (tem))
11176 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11177 build2 (GE_EXPR, type,
11178 TREE_OPERAND (arg0, 0), tem),
11179 build2 (LE_EXPR, type,
11180 TREE_OPERAND (arg0, 0), arg1));
11181
11182 /* Convert ABS_EXPR<x> >= 0 to true. */
11183 strict_overflow_p = false;
11184 if (code == GE_EXPR
11185 && (integer_zerop (arg1)
11186 || (! HONOR_NANS (arg0)
11187 && real_zerop (arg1)))
11188 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11189 {
11190 if (strict_overflow_p)
11191 fold_overflow_warning (("assuming signed overflow does not occur "
11192 "when simplifying comparison of "
11193 "absolute value and zero"),
11194 WARN_STRICT_OVERFLOW_CONDITIONAL);
11195 return omit_one_operand_loc (loc, type,
11196 constant_boolean_node (true, type),
11197 arg0);
11198 }
11199
11200 /* Convert ABS_EXPR<x> < 0 to false. */
11201 strict_overflow_p = false;
11202 if (code == LT_EXPR
11203 && (integer_zerop (arg1) || real_zerop (arg1))
11204 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11205 {
11206 if (strict_overflow_p)
11207 fold_overflow_warning (("assuming signed overflow does not occur "
11208 "when simplifying comparison of "
11209 "absolute value and zero"),
11210 WARN_STRICT_OVERFLOW_CONDITIONAL);
11211 return omit_one_operand_loc (loc, type,
11212 constant_boolean_node (false, type),
11213 arg0);
11214 }
11215
11216 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11217 and similarly for >= into !=. */
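      /* For example (illustrative, editor's sketch): for an unsigned x,
	 x < (1U << y) becomes (x >> y) == 0 and x >= (1U << y) becomes
	 (x >> y) != 0.  */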
11218 if ((code == LT_EXPR || code == GE_EXPR)
11219 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11220 && TREE_CODE (arg1) == LSHIFT_EXPR
11221 && integer_onep (TREE_OPERAND (arg1, 0)))
11222 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11223 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11224 TREE_OPERAND (arg1, 1)),
11225 build_zero_cst (TREE_TYPE (arg0)));
11226
11227 	/* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11228 	   otherwise Y might be >= # of bits in X's type and thus e.g.
11229 	   (unsigned char) (1 << Y) for Y == 15 might be 0.
11230 	   If the cast is widening, then 1 << Y should have unsigned type,
11231 	   otherwise if Y is the number of bits in the signed shift type minus 1,
11232 	   we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11233 	   Y == 31 might be 0xffffffff80000000. */
11234 if ((code == LT_EXPR || code == GE_EXPR)
11235 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11236 && CONVERT_EXPR_P (arg1)
11237 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11238 && (element_precision (TREE_TYPE (arg1))
11239 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11240 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11241 || (element_precision (TREE_TYPE (arg1))
11242 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11243 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11244 {
11245 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11246 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11247 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11248 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11249 build_zero_cst (TREE_TYPE (arg0)));
11250 }
11251
11252 return NULL_TREE;
11253
11254 case UNORDERED_EXPR:
11255 case ORDERED_EXPR:
11256 case UNLT_EXPR:
11257 case UNLE_EXPR:
11258 case UNGT_EXPR:
11259 case UNGE_EXPR:
11260 case UNEQ_EXPR:
11261 case LTGT_EXPR:
11262 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11263 {
11264 tree targ0 = strip_float_extensions (arg0);
11265 tree targ1 = strip_float_extensions (arg1);
11266 tree newtype = TREE_TYPE (targ0);
11267
11268 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11269 newtype = TREE_TYPE (targ1);
11270
11271 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11272 return fold_build2_loc (loc, code, type,
11273 fold_convert_loc (loc, newtype, targ0),
11274 fold_convert_loc (loc, newtype, targ1));
11275 }
11276
11277 return NULL_TREE;
11278
11279 case COMPOUND_EXPR:
11280 /* When pedantic, a compound expression can be neither an lvalue
11281 nor an integer constant expression. */
11282 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11283 return NULL_TREE;
11284 	 /* Don't let (0, 0) be a null pointer constant. */
11285 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11286 : fold_convert_loc (loc, type, arg1);
11287 return pedantic_non_lvalue_loc (loc, tem);
11288
11289 case ASSERT_EXPR:
11290 /* An ASSERT_EXPR should never be passed to fold_binary. */
11291 gcc_unreachable ();
11292
11293 default:
11294 return NULL_TREE;
11295 } /* switch (code) */
11296 }
11297
11298 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11299 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11300 of GOTO_EXPR. */
11301
11302 static tree
11303 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11304 {
11305 switch (TREE_CODE (*tp))
11306 {
11307 case LABEL_EXPR:
11308 return *tp;
11309
11310 case GOTO_EXPR:
11311 *walk_subtrees = 0;
11312
11313 /* ... fall through ... */
11314
11315 default:
11316 return NULL_TREE;
11317 }
11318 }
11319
11320 /* Return whether the sub-tree ST contains a label which is accessible from
11321 outside the sub-tree. */
11322
11323 static bool
11324 contains_label_p (tree st)
11325 {
11326 	 return (walk_tree_without_duplicates (&st, contains_label_1, NULL)
11327 		 != NULL_TREE);
11328 }
11329
11330 /* Fold a ternary expression of code CODE and type TYPE with operands
11331 OP0, OP1, and OP2. Return the folded expression if folding is
11332 successful. Otherwise, return NULL_TREE. */
11333
11334 tree
11335 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11336 tree op0, tree op1, tree op2)
11337 {
11338 tree tem;
11339 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11340 enum tree_code_class kind = TREE_CODE_CLASS (code);
11341
11342 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11343 && TREE_CODE_LENGTH (code) == 3);
11344
11345 /* If this is a commutative operation, and OP0 is a constant, move it
11346 to OP1 to reduce the number of tests below. */
11347 if (commutative_ternary_tree_code (code)
11348 && tree_swap_operands_p (op0, op1, true))
11349 return fold_build3_loc (loc, code, type, op1, op0, op2);
11350
11351 tem = generic_simplify (loc, code, type, op0, op1, op2);
11352 if (tem)
11353 return tem;
11354
11355 /* Strip any conversions that don't change the mode. This is safe
11356 for every expression, except for a comparison expression because
11357 its signedness is derived from its operands. So, in the latter
11358 case, only strip conversions that don't change the signedness.
11359
11360 Note that this is done as an internal manipulation within the
11361 constant folder, in order to find the simplest representation of
11362 	 the arguments so that their form can be studied. In any case,
11363 the appropriate type conversions should be put back in the tree
11364 that will get out of the constant folder. */
11365 if (op0)
11366 {
11367 arg0 = op0;
11368 STRIP_NOPS (arg0);
11369 }
11370
11371 if (op1)
11372 {
11373 arg1 = op1;
11374 STRIP_NOPS (arg1);
11375 }
11376
11377 if (op2)
11378 {
11379 arg2 = op2;
11380 STRIP_NOPS (arg2);
11381 }
11382
11383 switch (code)
11384 {
11385 case COMPONENT_REF:
11386 if (TREE_CODE (arg0) == CONSTRUCTOR
11387 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11388 {
11389 unsigned HOST_WIDE_INT idx;
11390 tree field, value;
11391 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11392 if (field == arg1)
11393 return value;
11394 }
11395 return NULL_TREE;
11396
11397 case COND_EXPR:
11398 case VEC_COND_EXPR:
11399 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11400 so all simple results must be passed through pedantic_non_lvalue. */
11401 if (TREE_CODE (arg0) == INTEGER_CST)
11402 {
11403 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11404 tem = integer_zerop (arg0) ? op2 : op1;
11405 /* Only optimize constant conditions when the selected branch
11406 has the same type as the COND_EXPR. This avoids optimizing
11407 away "c ? x : throw", where the throw has a void type.
11408 	   Avoid throwing away the operand that contains a label. */
11409 if ((!TREE_SIDE_EFFECTS (unused_op)
11410 || !contains_label_p (unused_op))
11411 && (! VOID_TYPE_P (TREE_TYPE (tem))
11412 || VOID_TYPE_P (type)))
11413 return pedantic_non_lvalue_loc (loc, tem);
11414 return NULL_TREE;
11415 }
11416 else if (TREE_CODE (arg0) == VECTOR_CST)
11417 {
11418 if ((TREE_CODE (arg1) == VECTOR_CST
11419 || TREE_CODE (arg1) == CONSTRUCTOR)
11420 && (TREE_CODE (arg2) == VECTOR_CST
11421 || TREE_CODE (arg2) == CONSTRUCTOR))
11422 {
11423 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11424 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11425 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11426 for (i = 0; i < nelts; i++)
11427 {
11428 tree val = VECTOR_CST_ELT (arg0, i);
11429 if (integer_all_onesp (val))
11430 sel[i] = i;
11431 else if (integer_zerop (val))
11432 sel[i] = nelts + i;
11433 else /* Currently unreachable. */
11434 return NULL_TREE;
11435 }
11436 tree t = fold_vec_perm (type, arg1, arg2, sel);
11437 if (t != NULL_TREE)
11438 return t;
11439 }
11440 }
11441
11442 /* If we have A op B ? A : C, we may be able to convert this to a
11443 simpler expression, depending on the operation and the values
11444 of B and C. Signed zeros prevent all of these transformations,
11445 for reasons given above each one.
11446
11447 Also try swapping the arguments and inverting the conditional. */
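      /* For example (illustrative, editor's sketch): x > y ? x : y may
	 fold to MAX_EXPR <x, y> when signed zeros need not be honored.  */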
11448 if (COMPARISON_CLASS_P (arg0)
11449 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11450 arg1, TREE_OPERAND (arg0, 1))
11451 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11452 {
11453 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11454 if (tem)
11455 return tem;
11456 }
11457
11458 if (COMPARISON_CLASS_P (arg0)
11459 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11460 op2,
11461 TREE_OPERAND (arg0, 1))
11462 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11463 {
11464 location_t loc0 = expr_location_or (arg0, loc);
11465 tem = fold_invert_truthvalue (loc0, arg0);
11466 if (tem && COMPARISON_CLASS_P (tem))
11467 {
11468 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11469 if (tem)
11470 return tem;
11471 }
11472 }
11473
11474 /* If the second operand is simpler than the third, swap them
11475 since that produces better jump optimization results. */
11476 if (truth_value_p (TREE_CODE (arg0))
11477 && tree_swap_operands_p (op1, op2, false))
11478 {
11479 location_t loc0 = expr_location_or (arg0, loc);
11480 /* See if this can be inverted. If it can't, possibly because
11481 it was a floating-point inequality comparison, don't do
11482 anything. */
11483 tem = fold_invert_truthvalue (loc0, arg0);
11484 if (tem)
11485 return fold_build3_loc (loc, code, type, tem, op2, op1);
11486 }
11487
11488 /* Convert A ? 1 : 0 to simply A. */
11489 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11490 : (integer_onep (op1)
11491 && !VECTOR_TYPE_P (type)))
11492 && integer_zerop (op2)
11493 /* If we try to convert OP0 to our type, the
11494 call to fold will try to move the conversion inside
11495 a COND, which will recurse. In that case, the COND_EXPR
11496 is probably the best choice, so leave it alone. */
11497 && type == TREE_TYPE (arg0))
11498 return pedantic_non_lvalue_loc (loc, arg0);
11499
11500 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11501 over COND_EXPR in cases such as floating point comparisons. */
11502 if (integer_zerop (op1)
11503 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11504 : (integer_onep (op2)
11505 && !VECTOR_TYPE_P (type)))
11506 && truth_value_p (TREE_CODE (arg0)))
11507 return pedantic_non_lvalue_loc (loc,
11508 fold_convert_loc (loc, type,
11509 invert_truthvalue_loc (loc,
11510 arg0)));
11511
11512 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
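      /* For example (illustrative, editor's sketch): for a signed 32-bit
	 x, x < 0 ? INT_MIN : 0 folds to x & INT_MIN.  */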
11513 if (TREE_CODE (arg0) == LT_EXPR
11514 && integer_zerop (TREE_OPERAND (arg0, 1))
11515 && integer_zerop (op2)
11516 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11517 {
11518 /* sign_bit_p looks through both zero and sign extensions,
11519 but for this optimization only sign extensions are
11520 usable. */
11521 tree tem2 = TREE_OPERAND (arg0, 0);
11522 while (tem != tem2)
11523 {
11524 if (TREE_CODE (tem2) != NOP_EXPR
11525 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11526 {
11527 tem = NULL_TREE;
11528 break;
11529 }
11530 tem2 = TREE_OPERAND (tem2, 0);
11531 }
11532 /* sign_bit_p only checks ARG1 bits within A's precision.
11533 If <sign bit of A> has wider type than A, bits outside
11534 of A's precision in <sign bit of A> need to be checked.
11535 		If they are all 0, this optimization must be done in
11536 		A's unsigned type; if they are all 1, in A's signed type;
11537 		otherwise it can't be done. */
11538 if (tem
11539 && TYPE_PRECISION (TREE_TYPE (tem))
11540 < TYPE_PRECISION (TREE_TYPE (arg1))
11541 && TYPE_PRECISION (TREE_TYPE (tem))
11542 < TYPE_PRECISION (type))
11543 {
11544 int inner_width, outer_width;
11545 tree tem_type;
11546
11547 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11548 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11549 if (outer_width > TYPE_PRECISION (type))
11550 outer_width = TYPE_PRECISION (type);
11551
11552 wide_int mask = wi::shifted_mask
11553 (inner_width, outer_width - inner_width, false,
11554 TYPE_PRECISION (TREE_TYPE (arg1)));
11555
11556 wide_int common = mask & arg1;
11557 if (common == mask)
11558 {
11559 tem_type = signed_type_for (TREE_TYPE (tem));
11560 tem = fold_convert_loc (loc, tem_type, tem);
11561 }
11562 else if (common == 0)
11563 {
11564 tem_type = unsigned_type_for (TREE_TYPE (tem));
11565 tem = fold_convert_loc (loc, tem_type, tem);
11566 }
11567 else
11568 tem = NULL;
11569 }
11570
11571 if (tem)
11572 return
11573 fold_convert_loc (loc, type,
11574 fold_build2_loc (loc, BIT_AND_EXPR,
11575 TREE_TYPE (tem), tem,
11576 fold_convert_loc (loc,
11577 TREE_TYPE (tem),
11578 arg1)));
11579 }
11580
11581 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11582 already handled above. */
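      /* For example (illustrative, editor's sketch): ((x >> 3) & 1) ? 8 : 0
	 folds to x & 8.  */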
11583 if (TREE_CODE (arg0) == BIT_AND_EXPR
11584 && integer_onep (TREE_OPERAND (arg0, 1))
11585 && integer_zerop (op2)
11586 && integer_pow2p (arg1))
11587 {
11588 tree tem = TREE_OPERAND (arg0, 0);
11589 STRIP_NOPS (tem);
11590 if (TREE_CODE (tem) == RSHIFT_EXPR
11591 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11592 	    && ((unsigned HOST_WIDE_INT) tree_log2 (arg1)
11593 		== tree_to_uhwi (TREE_OPERAND (tem, 1))))
11594 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11595 TREE_OPERAND (tem, 0), arg1);
11596 }
11597
11598 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11599 is probably obsolete because the first operand should be a
11600 truth value (that's why we have the two cases above), but let's
11601 leave it in until we can confirm this for all front-ends. */
11602 if (integer_zerop (op2)
11603 && TREE_CODE (arg0) == NE_EXPR
11604 && integer_zerop (TREE_OPERAND (arg0, 1))
11605 && integer_pow2p (arg1)
11606 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11607 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11608 arg1, OEP_ONLY_CONST))
11609 return pedantic_non_lvalue_loc (loc,
11610 fold_convert_loc (loc, type,
11611 TREE_OPERAND (arg0, 0)));
11612
11613 /* Disable the transformations below for vectors, since
11614 fold_binary_op_with_conditional_arg may undo them immediately,
11615 yielding an infinite loop. */
11616 if (code == VEC_COND_EXPR)
11617 return NULL_TREE;
11618
11619 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11620 if (integer_zerop (op2)
11621 && truth_value_p (TREE_CODE (arg0))
11622 && truth_value_p (TREE_CODE (arg1))
11623 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11624 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11625 : TRUTH_ANDIF_EXPR,
11626 type, fold_convert_loc (loc, type, arg0), arg1);
11627
11628 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11629 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11630 && truth_value_p (TREE_CODE (arg0))
11631 && truth_value_p (TREE_CODE (arg1))
11632 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11633 {
11634 location_t loc0 = expr_location_or (arg0, loc);
11635 /* Only perform transformation if ARG0 is easily inverted. */
11636 tem = fold_invert_truthvalue (loc0, arg0);
11637 if (tem)
11638 return fold_build2_loc (loc, code == VEC_COND_EXPR
11639 ? BIT_IOR_EXPR
11640 : TRUTH_ORIF_EXPR,
11641 type, fold_convert_loc (loc, type, tem),
11642 arg1);
11643 }
11644
11645 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11646 if (integer_zerop (arg1)
11647 && truth_value_p (TREE_CODE (arg0))
11648 && truth_value_p (TREE_CODE (op2))
11649 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11650 {
11651 location_t loc0 = expr_location_or (arg0, loc);
11652 /* Only perform transformation if ARG0 is easily inverted. */
11653 tem = fold_invert_truthvalue (loc0, arg0);
11654 if (tem)
11655 return fold_build2_loc (loc, code == VEC_COND_EXPR
11656 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11657 type, fold_convert_loc (loc, type, tem),
11658 op2);
11659 }
11660
11661 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11662 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11663 && truth_value_p (TREE_CODE (arg0))
11664 && truth_value_p (TREE_CODE (op2))
11665 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11666 return fold_build2_loc (loc, code == VEC_COND_EXPR
11667 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11668 type, fold_convert_loc (loc, type, arg0), op2);
11669
11670 return NULL_TREE;
11671
11672 case CALL_EXPR:
11673 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11674 of fold_ternary on them. */
11675 gcc_unreachable ();
11676
11677 case BIT_FIELD_REF:
11678 if ((TREE_CODE (arg0) == VECTOR_CST
11679 || (TREE_CODE (arg0) == CONSTRUCTOR
11680 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11681 && (type == TREE_TYPE (TREE_TYPE (arg0))
11682 || (TREE_CODE (type) == VECTOR_TYPE
11683 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11684 {
11685 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11686 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11687 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11688 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11689
11690 if (n != 0
11691 && (idx % width) == 0
11692 && (n % width) == 0
11693 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11694 {
11695 idx = idx / width;
11696 n = n / width;
11697
11698 if (TREE_CODE (arg0) == VECTOR_CST)
11699 {
11700 if (n == 1)
11701 return VECTOR_CST_ELT (arg0, idx);
11702
11703 tree *vals = XALLOCAVEC (tree, n);
11704 for (unsigned i = 0; i < n; ++i)
11705 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11706 return build_vector (type, vals);
11707 }
11708
11709 /* Constructor elements can be subvectors. */
11710 unsigned HOST_WIDE_INT k = 1;
11711 if (CONSTRUCTOR_NELTS (arg0) != 0)
11712 {
11713 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11714 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11715 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11716 }
11717
11718 /* We keep an exact subset of the constructor elements. */
11719 if ((idx % k) == 0 && (n % k) == 0)
11720 {
11721 if (CONSTRUCTOR_NELTS (arg0) == 0)
11722 return build_constructor (type, NULL);
11723 idx /= k;
11724 n /= k;
11725 if (n == 1)
11726 {
11727 if (idx < CONSTRUCTOR_NELTS (arg0))
11728 return CONSTRUCTOR_ELT (arg0, idx)->value;
11729 return build_zero_cst (type);
11730 }
11731
11732 vec<constructor_elt, va_gc> *vals;
11733 vec_alloc (vals, n);
11734 for (unsigned i = 0;
11735 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11736 ++i)
11737 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11738 CONSTRUCTOR_ELT
11739 (arg0, idx + i)->value);
11740 return build_constructor (type, vals);
11741 }
11742 /* The bitfield references a single constructor element. */
11743 else if (idx + n <= (idx / k + 1) * k)
11744 {
11745 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11746 return build_zero_cst (type);
11747 else if (n == k)
11748 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11749 else
11750 return fold_build3_loc (loc, code, type,
11751 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11752 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11753 }
11754 }
11755 }
11756
11757 /* A bit-field-ref that referenced the full argument can be stripped. */
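      /* For example (illustrative, editor's sketch): BIT_FIELD_REF <x, 32, 0>
	 of a 32-bit integer x is just (type) x.  */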
11758 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11759 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11760 && integer_zerop (op2))
11761 return fold_convert_loc (loc, type, arg0);
11762
11763 /* On constants we can use native encode/interpret to constant
11764 fold (nearly) all BIT_FIELD_REFs. */
11765 if (CONSTANT_CLASS_P (arg0)
11766 && can_native_interpret_type_p (type)
11767 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11768 /* This limitation should not be necessary, we just need to
11769 round this up to mode size. */
11770 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11771 /* Need bit-shifting of the buffer to relax the following. */
11772 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11773 {
11774 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11775 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11776 unsigned HOST_WIDE_INT clen;
11777 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11778 /* ??? We cannot tell native_encode_expr to start at
11779 	 an arbitrary byte. So limit ourselves to a reasonable amount
11780 of work. */
11781 if (clen <= 4096)
11782 {
11783 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11784 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11785 if (len > 0
11786 && len * BITS_PER_UNIT >= bitpos + bitsize)
11787 {
11788 tree v = native_interpret_expr (type,
11789 b + bitpos / BITS_PER_UNIT,
11790 bitsize / BITS_PER_UNIT);
11791 if (v)
11792 return v;
11793 }
11794 }
11795 }
11796
11797 return NULL_TREE;
11798
11799 case FMA_EXPR:
11800 /* For integers we can decompose the FMA if possible. */
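      /* For example (illustrative, editor's sketch): FMA_EXPR <3, 4, z>
	 becomes 12 + z, and FMA_EXPR <x, y, 0> becomes x * y.  */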
11801 if (TREE_CODE (arg0) == INTEGER_CST
11802 && TREE_CODE (arg1) == INTEGER_CST)
11803 return fold_build2_loc (loc, PLUS_EXPR, type,
11804 const_binop (MULT_EXPR, arg0, arg1), arg2);
11805 if (integer_zerop (arg2))
11806 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11807
11808 return fold_fma (loc, type, arg0, arg1, arg2);
11809
11810 case VEC_PERM_EXPR:
11811 if (TREE_CODE (arg2) == VECTOR_CST)
11812 {
11813 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11814 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11815 unsigned char *sel2 = sel + nelts;
11816 bool need_mask_canon = false;
11817 bool need_mask_canon2 = false;
11818 bool all_in_vec0 = true;
11819 bool all_in_vec1 = true;
11820 bool maybe_identity = true;
11821 bool single_arg = (op0 == op1);
11822 bool changed = false;
11823
11824 mask2 = 2 * nelts - 1;
11825 mask = single_arg ? (nelts - 1) : mask2;
11826 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11827 for (i = 0; i < nelts; i++)
11828 {
11829 tree val = VECTOR_CST_ELT (arg2, i);
11830 if (TREE_CODE (val) != INTEGER_CST)
11831 return NULL_TREE;
11832
11833 /* Make sure that the perm value is in an acceptable
11834 range. */
11835 wide_int t = val;
11836 need_mask_canon |= wi::gtu_p (t, mask);
11837 need_mask_canon2 |= wi::gtu_p (t, mask2);
11838 sel[i] = t.to_uhwi () & mask;
11839 sel2[i] = t.to_uhwi () & mask2;
11840
11841 if (sel[i] < nelts)
11842 all_in_vec1 = false;
11843 else
11844 all_in_vec0 = false;
11845
11846 if ((sel[i] & (nelts-1)) != i)
11847 maybe_identity = false;
11848 }
11849
11850 if (maybe_identity)
11851 {
11852 if (all_in_vec0)
11853 return op0;
11854 if (all_in_vec1)
11855 return op1;
11856 }
11857
11858 if (all_in_vec0)
11859 op1 = op0;
11860 else if (all_in_vec1)
11861 {
11862 op0 = op1;
11863 for (i = 0; i < nelts; i++)
11864 sel[i] -= nelts;
11865 need_mask_canon = true;
11866 }
11867
11868 if ((TREE_CODE (op0) == VECTOR_CST
11869 || TREE_CODE (op0) == CONSTRUCTOR)
11870 && (TREE_CODE (op1) == VECTOR_CST
11871 || TREE_CODE (op1) == CONSTRUCTOR))
11872 {
11873 tree t = fold_vec_perm (type, op0, op1, sel);
11874 if (t != NULL_TREE)
11875 return t;
11876 }
11877
11878 if (op0 == op1 && !single_arg)
11879 changed = true;
11880
11881 /* Some targets are deficient and fail to expand a single
11882 argument permutation while still allowing an equivalent
11883 2-argument version. */
11884 if (need_mask_canon && arg2 == op2
11885 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11886 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11887 {
11888 need_mask_canon = need_mask_canon2;
11889 sel = sel2;
11890 }
11891
11892 if (need_mask_canon && arg2 == op2)
11893 {
11894 tree *tsel = XALLOCAVEC (tree, nelts);
11895 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11896 for (i = 0; i < nelts; i++)
11897 tsel[i] = build_int_cst (eltype, sel[i]);
11898 op2 = build_vector (TREE_TYPE (arg2), tsel);
11899 changed = true;
11900 }
11901
11902 if (changed)
11903 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11904 }
11905 return NULL_TREE;
11906
11907 default:
11908 return NULL_TREE;
11909 } /* switch (code) */
11910 }
11911
11912 /* Perform constant folding and related simplification of EXPR.
11913 The related simplifications include x*1 => x, x*0 => 0, etc.,
11914 and application of the associative law.
11915 NOP_EXPR conversions may be removed freely (as long as we
11916 are careful not to change the type of the overall expression).
11917 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11918 but we can constant-fold them if they have constant operands. */
11919
11920 #ifdef ENABLE_FOLD_CHECKING
11921 # define fold(x) fold_1 (x)
11922 static tree fold_1 (tree);
11923 static
11924 #endif
11925 tree
11926 fold (tree expr)
11927 {
11928 const tree t = expr;
11929 enum tree_code code = TREE_CODE (t);
11930 enum tree_code_class kind = TREE_CODE_CLASS (code);
11931 tree tem;
11932 location_t loc = EXPR_LOCATION (expr);
11933
11934 /* Return right away if a constant. */
11935 if (kind == tcc_constant)
11936 return t;
11937
11938 /* CALL_EXPR-like objects with variable numbers of operands are
11939 treated specially. */
11940 if (kind == tcc_vl_exp)
11941 {
11942 if (code == CALL_EXPR)
11943 {
11944 tem = fold_call_expr (loc, expr, false);
11945 return tem ? tem : expr;
11946 }
11947 return expr;
11948 }
11949
11950 if (IS_EXPR_CODE_CLASS (kind))
11951 {
11952 tree type = TREE_TYPE (t);
11953 tree op0, op1, op2;
11954
11955 switch (TREE_CODE_LENGTH (code))
11956 {
11957 case 1:
11958 op0 = TREE_OPERAND (t, 0);
11959 tem = fold_unary_loc (loc, code, type, op0);
11960 return tem ? tem : expr;
11961 case 2:
11962 op0 = TREE_OPERAND (t, 0);
11963 op1 = TREE_OPERAND (t, 1);
11964 tem = fold_binary_loc (loc, code, type, op0, op1);
11965 return tem ? tem : expr;
11966 case 3:
11967 op0 = TREE_OPERAND (t, 0);
11968 op1 = TREE_OPERAND (t, 1);
11969 op2 = TREE_OPERAND (t, 2);
11970 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11971 return tem ? tem : expr;
11972 default:
11973 break;
11974 }
11975 }
11976
11977 switch (code)
11978 {
11979 case ARRAY_REF:
11980 {
11981 tree op0 = TREE_OPERAND (t, 0);
11982 tree op1 = TREE_OPERAND (t, 1);
11983
11984 if (TREE_CODE (op1) == INTEGER_CST
11985 && TREE_CODE (op0) == CONSTRUCTOR
11986 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11987 {
11988 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
11989 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
11990 unsigned HOST_WIDE_INT begin = 0;
11991
11992 /* Find a matching index by means of a binary search. */
11993 while (begin != end)
11994 {
11995 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
11996 tree index = (*elts)[middle].index;
11997
11998 if (TREE_CODE (index) == INTEGER_CST
11999 && tree_int_cst_lt (index, op1))
12000 begin = middle + 1;
12001 else if (TREE_CODE (index) == INTEGER_CST
12002 && tree_int_cst_lt (op1, index))
12003 end = middle;
12004 else if (TREE_CODE (index) == RANGE_EXPR
12005 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
12006 begin = middle + 1;
12007 else if (TREE_CODE (index) == RANGE_EXPR
12008 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
12009 end = middle;
12010 else
12011 return (*elts)[middle].value;
12012 }
12013 }
12014
12015 return t;
12016 }
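/* Example (illustrative, not from the GCC sources): folding the
   ARRAY_REF a[2], where A is a CONSTRUCTOR for { 10, 20, 30, 40 },
   binary-searches the element vector above and returns the
   INTEGER_CST 30 without materializing the array.  */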
12017
12018 /* Return a VECTOR_CST if possible. */
12019 case CONSTRUCTOR:
12020 {
12021 tree type = TREE_TYPE (t);
12022 if (TREE_CODE (type) != VECTOR_TYPE)
12023 return t;
12024
12025 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
12026 unsigned HOST_WIDE_INT idx, pos = 0;
12027 tree value;
12028
12029 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
12030 {
12031 if (!CONSTANT_CLASS_P (value))
12032 return t;
12033 if (TREE_CODE (value) == VECTOR_CST)
12034 {
12035 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
12036 vec[pos++] = VECTOR_CST_ELT (value, i);
12037 }
12038 else
12039 vec[pos++] = value;
12040 }
12041 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
12042 vec[pos] = build_zero_cst (TREE_TYPE (type));
12043
12044 return build_vector (type, vec);
12045 }
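/* Example (illustrative, not from the GCC sources): a CONSTRUCTOR for a
   four-element vector type that lists only the constants { 1, 2 } folds
   to the VECTOR_CST { 1, 2, 0, 0 }; the loop above zero-fills the
   remaining lanes.  */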
12046
12047 case CONST_DECL:
12048 return fold (DECL_INITIAL (t));
12049
12050 default:
12051 return t;
12052 } /* switch (code) */
12053 }
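/* Example (illustrative, not from the GCC sources): folding a freshly
   built constant expression.

     tree a = build_int_cst (integer_type_node, 2);
     tree b = build_int_cst (integer_type_node, 3);
     tree sum = fold (build2 (PLUS_EXPR, integer_type_node, a, b));

   SUM is then the INTEGER_CST 5 rather than a PLUS_EXPR node.  */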
12054
12055 #ifdef ENABLE_FOLD_CHECKING
12056 #undef fold
12057
12058 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12059 hash_table<nofree_ptr_hash<const tree_node> > *);
12060 static void fold_check_failed (const_tree, const_tree);
12061 void print_fold_checksum (const_tree);
12062
12063 /* When GCC is configured with --enable-checking=fold, compute a digest
12064 of EXPR before and after the actual fold call to verify that fold did
12065 not accidentally change the original expression. */
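/* Usage note (illustrative): the digest check is enabled when GCC itself
   is built with

     ./configure --enable-checking=fold

   after which any fold that mutates one of its input trees stops the
   compiler through fold_check_failed with an internal error.  */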
12066
12067 tree
12068 fold (tree expr)
12069 {
12070 tree ret;
12071 struct md5_ctx ctx;
12072 unsigned char checksum_before[16], checksum_after[16];
12073 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12074
12075 md5_init_ctx (&ctx);
12076 fold_checksum_tree (expr, &ctx, &ht);
12077 md5_finish_ctx (&ctx, checksum_before);
12078 ht.empty ();
12079
12080 ret = fold_1 (expr);
12081
12082 md5_init_ctx (&ctx);
12083 fold_checksum_tree (expr, &ctx, &ht);
12084 md5_finish_ctx (&ctx, checksum_after);
12085
12086 if (memcmp (checksum_before, checksum_after, 16))
12087 fold_check_failed (expr, ret);
12088
12089 return ret;
12090 }
12091
12092 void
12093 print_fold_checksum (const_tree expr)
12094 {
12095 struct md5_ctx ctx;
12096 unsigned char checksum[16], cnt;
12097 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12098
12099 md5_init_ctx (&ctx);
12100 fold_checksum_tree (expr, &ctx, &ht);
12101 md5_finish_ctx (&ctx, checksum);
12102 for (cnt = 0; cnt < 16; ++cnt)
12103 fprintf (stderr, "%02x", checksum[cnt]);
12104 putc ('\n', stderr);
12105 }
12106
12107 static void
12108 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12109 {
12110 internal_error ("fold check: original tree changed by fold");
12111 }
12112
12113 static void
12114 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12115 hash_table<nofree_ptr_hash<const tree_node> > *ht)
12116 {
12117 const tree_node **slot;
12118 enum tree_code code;
12119 union tree_node buf;
12120 int i, len;
12121
12122 recursive_label:
12123 if (expr == NULL)
12124 return;
12125 slot = ht->find_slot (expr, INSERT);
12126 if (*slot != NULL)
12127 return;
12128 *slot = expr;
12129 code = TREE_CODE (expr);
12130 if (TREE_CODE_CLASS (code) == tcc_declaration
12131 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12132 {
12133 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12134 memcpy ((char *) &buf, expr, tree_size (expr));
12135 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12136 buf.decl_with_vis.symtab_node = NULL;
12137 expr = (tree) &buf;
12138 }
12139 else if (TREE_CODE_CLASS (code) == tcc_type
12140 && (TYPE_POINTER_TO (expr)
12141 || TYPE_REFERENCE_TO (expr)
12142 || TYPE_CACHED_VALUES_P (expr)
12143 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12144 || TYPE_NEXT_VARIANT (expr)))
12145 {
12146 /* Allow these fields to be modified. */
12147 tree tmp;
12148 memcpy ((char *) &buf, expr, tree_size (expr));
12149 expr = tmp = (tree) &buf;
12150 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12151 TYPE_POINTER_TO (tmp) = NULL;
12152 TYPE_REFERENCE_TO (tmp) = NULL;
12153 TYPE_NEXT_VARIANT (tmp) = NULL;
12154 if (TYPE_CACHED_VALUES_P (tmp))
12155 {
12156 TYPE_CACHED_VALUES_P (tmp) = 0;
12157 TYPE_CACHED_VALUES (tmp) = NULL;
12158 }
12159 }
12160 md5_process_bytes (expr, tree_size (expr), ctx);
12161 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12162 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12163 if (TREE_CODE_CLASS (code) != tcc_type
12164 && TREE_CODE_CLASS (code) != tcc_declaration
12165 && code != TREE_LIST
12166 && code != SSA_NAME
12167 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12168 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12169 switch (TREE_CODE_CLASS (code))
12170 {
12171 case tcc_constant:
12172 switch (code)
12173 {
12174 case STRING_CST:
12175 md5_process_bytes (TREE_STRING_POINTER (expr),
12176 TREE_STRING_LENGTH (expr), ctx);
12177 break;
12178 case COMPLEX_CST:
12179 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12180 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12181 break;
12182 case VECTOR_CST:
12183 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12184 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12185 break;
12186 default:
12187 break;
12188 }
12189 break;
12190 case tcc_exceptional:
12191 switch (code)
12192 {
12193 case TREE_LIST:
12194 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12195 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12196 expr = TREE_CHAIN (expr);
12197 goto recursive_label;
12198 break;
12199 case TREE_VEC:
12200 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12201 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12202 break;
12203 default:
12204 break;
12205 }
12206 break;
12207 case tcc_expression:
12208 case tcc_reference:
12209 case tcc_comparison:
12210 case tcc_unary:
12211 case tcc_binary:
12212 case tcc_statement:
12213 case tcc_vl_exp:
12214 len = TREE_OPERAND_LENGTH (expr);
12215 for (i = 0; i < len; ++i)
12216 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12217 break;
12218 case tcc_declaration:
12219 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12220 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12221 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12222 {
12223 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12224 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12225 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12226 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12227 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12228 }
12229
12230 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12231 {
12232 if (TREE_CODE (expr) == FUNCTION_DECL)
12233 {
12234 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12235 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12236 }
12237 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12238 }
12239 break;
12240 case tcc_type:
12241 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12242 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12243 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12244 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12245 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12246 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12247 if (INTEGRAL_TYPE_P (expr)
12248 || SCALAR_FLOAT_TYPE_P (expr))
12249 {
12250 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12251 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12252 }
12253 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12254 if (TREE_CODE (expr) == RECORD_TYPE
12255 || TREE_CODE (expr) == UNION_TYPE
12256 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12257 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12258 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12259 break;
12260 default:
12261 break;
12262 }
12263 }
12264
12265 /* Helper function for outputting the checksum of a tree T. When
12266 debugging with gdb, you can "define mynext" to be "next" followed
12267 by "call debug_fold_checksum (op0)", then just trace down till the
12268 outputs differ. */
12269
12270 DEBUG_FUNCTION void
12271 debug_fold_checksum (const_tree t)
12272 {
12273 int i;
12274 unsigned char checksum[16];
12275 struct md5_ctx ctx;
12276 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12277
12278 md5_init_ctx (&ctx);
12279 fold_checksum_tree (t, &ctx, &ht);
12280 md5_finish_ctx (&ctx, checksum);
12281 ht.empty ();
12282
12283 for (i = 0; i < 16; i++)
12284 fprintf (stderr, "%d ", checksum[i]);
12285
12286 fprintf (stderr, "\n");
12287 }
12288
12289 #endif
12290
12291 /* Fold a unary tree expression with code CODE of type TYPE with an
12292 operand OP0. LOC is the location of the resulting expression.
12293 Return a folded expression if successful. Otherwise, return a tree
12294 expression with code CODE of type TYPE with an operand OP0. */
12295
12296 tree
12297 fold_build1_stat_loc (location_t loc,
12298 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12299 {
12300 tree tem;
12301 #ifdef ENABLE_FOLD_CHECKING
12302 unsigned char checksum_before[16], checksum_after[16];
12303 struct md5_ctx ctx;
12304 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12305
12306 md5_init_ctx (&ctx);
12307 fold_checksum_tree (op0, &ctx, &ht);
12308 md5_finish_ctx (&ctx, checksum_before);
12309 ht.empty ();
12310 #endif
12311
12312 tem = fold_unary_loc (loc, code, type, op0);
12313 if (!tem)
12314 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12315
12316 #ifdef ENABLE_FOLD_CHECKING
12317 md5_init_ctx (&ctx);
12318 fold_checksum_tree (op0, &ctx, &ht);
12319 md5_finish_ctx (&ctx, checksum_after);
12320
12321 if (memcmp (checksum_before, checksum_after, 16))
12322 fold_check_failed (op0, tem);
12323 #endif
12324 return tem;
12325 }
12326
12327 /* Fold a binary tree expression with code CODE of type TYPE with
12328 operands OP0 and OP1. LOC is the location of the resulting
12329 expression. Return a folded expression if successful. Otherwise,
12330 return a tree expression with code CODE of type TYPE with operands
12331 OP0 and OP1. */
12332
12333 tree
12334 fold_build2_stat_loc (location_t loc,
12335 enum tree_code code, tree type, tree op0, tree op1
12336 MEM_STAT_DECL)
12337 {
12338 tree tem;
12339 #ifdef ENABLE_FOLD_CHECKING
12340 unsigned char checksum_before_op0[16],
12341 checksum_before_op1[16],
12342 checksum_after_op0[16],
12343 checksum_after_op1[16];
12344 struct md5_ctx ctx;
12345 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12346
12347 md5_init_ctx (&ctx);
12348 fold_checksum_tree (op0, &ctx, &ht);
12349 md5_finish_ctx (&ctx, checksum_before_op0);
12350 ht.empty ();
12351
12352 md5_init_ctx (&ctx);
12353 fold_checksum_tree (op1, &ctx, &ht);
12354 md5_finish_ctx (&ctx, checksum_before_op1);
12355 ht.empty ();
12356 #endif
12357
12358 tem = fold_binary_loc (loc, code, type, op0, op1);
12359 if (!tem)
12360 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12361
12362 #ifdef ENABLE_FOLD_CHECKING
12363 md5_init_ctx (&ctx);
12364 fold_checksum_tree (op0, &ctx, &ht);
12365 md5_finish_ctx (&ctx, checksum_after_op0);
12366 ht.empty ();
12367
12368 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12369 fold_check_failed (op0, tem);
12370
12371 md5_init_ctx (&ctx);
12372 fold_checksum_tree (op1, &ctx, &ht);
12373 md5_finish_ctx (&ctx, checksum_after_op1);
12374
12375 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12376 fold_check_failed (op1, tem);
12377 #endif
12378 return tem;
12379 }
12380
12381 /* Fold a ternary tree expression with code CODE of type TYPE with
12382 operands OP0, OP1, and OP2. Return a folded expression if
12383 successful. Otherwise, return a tree expression with code CODE of
12384 type TYPE with operands OP0, OP1, and OP2. */
12385
12386 tree
12387 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12388 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12389 {
12390 tree tem;
12391 #ifdef ENABLE_FOLD_CHECKING
12392 unsigned char checksum_before_op0[16],
12393 checksum_before_op1[16],
12394 checksum_before_op2[16],
12395 checksum_after_op0[16],
12396 checksum_after_op1[16],
12397 checksum_after_op2[16];
12398 struct md5_ctx ctx;
12399 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12400
12401 md5_init_ctx (&ctx);
12402 fold_checksum_tree (op0, &ctx, &ht);
12403 md5_finish_ctx (&ctx, checksum_before_op0);
12404 ht.empty ();
12405
12406 md5_init_ctx (&ctx);
12407 fold_checksum_tree (op1, &ctx, &ht);
12408 md5_finish_ctx (&ctx, checksum_before_op1);
12409 ht.empty ();
12410
12411 md5_init_ctx (&ctx);
12412 fold_checksum_tree (op2, &ctx, &ht);
12413 md5_finish_ctx (&ctx, checksum_before_op2);
12414 ht.empty ();
12415 #endif
12416
12417 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12418 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12419 if (!tem)
12420 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12421
12422 #ifdef ENABLE_FOLD_CHECKING
12423 md5_init_ctx (&ctx);
12424 fold_checksum_tree (op0, &ctx, &ht);
12425 md5_finish_ctx (&ctx, checksum_after_op0);
12426 ht.empty ();
12427
12428 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12429 fold_check_failed (op0, tem);
12430
12431 md5_init_ctx (&ctx);
12432 fold_checksum_tree (op1, &ctx, &ht);
12433 md5_finish_ctx (&ctx, checksum_after_op1);
12434 ht.empty ();
12435
12436 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12437 fold_check_failed (op1, tem);
12438
12439 md5_init_ctx (&ctx);
12440 fold_checksum_tree (op2, &ctx, &ht);
12441 md5_finish_ctx (&ctx, checksum_after_op2);
12442
12443 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12444 fold_check_failed (op2, tem);
12445 #endif
12446 return tem;
12447 }
12448
12449 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
12450 arguments in ARGARRAY, and a null static chain.
12451 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12452 of type TYPE from the given operands as constructed by build_call_array. */
12453
12454 tree
12455 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12456 int nargs, tree *argarray)
12457 {
12458 tree tem;
12459 #ifdef ENABLE_FOLD_CHECKING
12460 unsigned char checksum_before_fn[16],
12461 checksum_before_arglist[16],
12462 checksum_after_fn[16],
12463 checksum_after_arglist[16];
12464 struct md5_ctx ctx;
12465 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12466 int i;
12467
12468 md5_init_ctx (&ctx);
12469 fold_checksum_tree (fn, &ctx, &ht);
12470 md5_finish_ctx (&ctx, checksum_before_fn);
12471 ht.empty ();
12472
12473 md5_init_ctx (&ctx);
12474 for (i = 0; i < nargs; i++)
12475 fold_checksum_tree (argarray[i], &ctx, &ht);
12476 md5_finish_ctx (&ctx, checksum_before_arglist);
12477 ht.empty ();
12478 #endif
12479
12480 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12481 if (!tem)
12482 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12483
12484 #ifdef ENABLE_FOLD_CHECKING
12485 md5_init_ctx (&ctx);
12486 fold_checksum_tree (fn, &ctx, &ht);
12487 md5_finish_ctx (&ctx, checksum_after_fn);
12488 ht.empty ();
12489
12490 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12491 fold_check_failed (fn, tem);
12492
12493 md5_init_ctx (&ctx);
12494 for (i = 0; i < nargs; i++)
12495 fold_checksum_tree (argarray[i], &ctx, &ht);
12496 md5_finish_ctx (&ctx, checksum_after_arglist);
12497
12498 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12499 fold_check_failed (NULL_TREE, tem);
12500 #endif
12501 return tem;
12502 }
12503
12504 /* Perform constant folding and related simplification of an initializer
12505 expression. These functions behave identically to "fold_buildN" but
12506 ignore potential run-time traps and exceptions that fold must preserve. */
12507
12508 #define START_FOLD_INIT \
12509 int saved_signaling_nans = flag_signaling_nans;\
12510 int saved_trapping_math = flag_trapping_math;\
12511 int saved_rounding_math = flag_rounding_math;\
12512 int saved_trapv = flag_trapv;\
12513 int saved_folding_initializer = folding_initializer;\
12514 flag_signaling_nans = 0;\
12515 flag_trapping_math = 0;\
12516 flag_rounding_math = 0;\
12517 flag_trapv = 0;\
12518 folding_initializer = 1;
12519
12520 #define END_FOLD_INIT \
12521 flag_signaling_nans = saved_signaling_nans;\
12522 flag_trapping_math = saved_trapping_math;\
12523 flag_rounding_math = saved_rounding_math;\
12524 flag_trapv = saved_trapv;\
12525 folding_initializer = saved_folding_initializer;
12526
12527 tree
12528 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12529 tree type, tree op)
12530 {
12531 tree result;
12532 START_FOLD_INIT;
12533
12534 result = fold_build1_loc (loc, code, type, op);
12535
12536 END_FOLD_INIT;
12537 return result;
12538 }
12539
12540 tree
12541 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12542 tree type, tree op0, tree op1)
12543 {
12544 tree result;
12545 START_FOLD_INIT;
12546
12547 result = fold_build2_loc (loc, code, type, op0, op1);
12548
12549 END_FOLD_INIT;
12550 return result;
12551 }
12552
12553 tree
12554 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12555 int nargs, tree *argarray)
12556 {
12557 tree result;
12558 START_FOLD_INIT;
12559
12560 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12561
12562 END_FOLD_INIT;
12563 return result;
12564 }
12565
12566 #undef START_FOLD_INIT
12567 #undef END_FOLD_INIT
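/* Example (illustrative, not from the GCC sources): a C translation unit
   containing

     static double d = 1.0 / 3.0;

   folds the division through the *_initializer_loc entry points above;
   flags such as -frounding-math and -ftrapping-math, which would normally
   inhibit compile-time evaluation, are temporarily cleared.  */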
12568
12569 /* Determine if the first argument is a multiple of the second argument.
12570 Return 0 if it is not, or if we cannot easily determine it to be.
12571
12572 An example of the sort of thing we care about (at this point; this routine
12573 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12574 fold cases do now) is discovering that
12575
12576 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12577
12578 is a multiple of
12579
12580 SAVE_EXPR (J * 8)
12581
12582 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12583
12584 This code also handles discovering that
12585
12586 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12587
12588 is a multiple of 8 so we don't have to worry about dealing with a
12589 possible remainder.
12590
12591 Note that we *look* inside a SAVE_EXPR only to determine how it was
12592 calculated; it is not safe for fold to do much of anything else with the
12593 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12594 at run time. For example, the latter example above *cannot* be implemented
12595 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12596 evaluation time of the original SAVE_EXPR is not necessarily the same at
12597 the time the new expression is evaluated. The only optimization of this
12598 sort that would be valid is changing
12599
12600 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12601
12602 divided by 8 to
12603
12604 SAVE_EXPR (I) * SAVE_EXPR (J)
12605
12606 (where the same SAVE_EXPR (J) is used in the original and the
12607 transformed version). */
12608
12609 int
12610 multiple_of_p (tree type, const_tree top, const_tree bottom)
12611 {
12612 if (operand_equal_p (top, bottom, 0))
12613 return 1;
12614
12615 if (TREE_CODE (type) != INTEGER_TYPE)
12616 return 0;
12617
12618 switch (TREE_CODE (top))
12619 {
12620 case BIT_AND_EXPR:
12621 /* Bitwise and provides a power of two multiple. If the mask is
12622 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12623 if (!integer_pow2p (bottom))
12624 return 0;
12625 /* FALLTHRU */
12626
12627 case MULT_EXPR:
12628 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12629 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12630
12631 case PLUS_EXPR:
12632 case MINUS_EXPR:
12633 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12634 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12635
12636 case LSHIFT_EXPR:
12637 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12638 {
12639 tree op1, t1;
12640
12641 op1 = TREE_OPERAND (top, 1);
12642 /* const_binop may not detect overflow correctly,
12643 so check for it explicitly here. */
12644 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12645 && 0 != (t1 = fold_convert (type,
12646 const_binop (LSHIFT_EXPR,
12647 size_one_node,
12648 op1)))
12649 && !TREE_OVERFLOW (t1))
12650 return multiple_of_p (type, t1, bottom);
12651 }
12652 return 0;
12653
12654 case NOP_EXPR:
12655 /* Can't handle conversions from non-integral or wider integral type. */
12656 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12657 || (TYPE_PRECISION (type)
12658 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12659 return 0;
12660
12661 /* ... fall through ...  */
12662
12663 case SAVE_EXPR:
12664 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12665
12666 case COND_EXPR:
12667 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12668 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12669
12670 case INTEGER_CST:
12671 if (TREE_CODE (bottom) != INTEGER_CST
12672 || integer_zerop (bottom)
12673 || (TYPE_UNSIGNED (type)
12674 && (tree_int_cst_sgn (top) < 0
12675 || tree_int_cst_sgn (bottom) < 0)))
12676 return 0;
12677 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12678 SIGNED);
12679
12680 default:
12681 return 0;
12682 }
12683 }
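/* Example (illustrative, not from the GCC sources): if TOP is the tree
   for I * 8 + 16 with a signed int I, then

     multiple_of_p (integer_type_node, top,
		    build_int_cst (integer_type_node, 8))

   returns 1: the PLUS_EXPR case requires both operands to be multiples
   of 8, the MULT_EXPR case is satisfied by the constant factor 8, and
   the INTEGER_CST case accepts 16.  */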
12684
12685 #define tree_expr_nonnegative_warnv_p(X, Y) \
12686 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12687
12688 #define RECURSE(X) \
12689 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
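/* Example (illustrative): within the functions below, a direct call such
   as tree_expr_nonnegative_warnv_p (op0, strict_overflow_p) would expand
   to the error pragma above and fail to compile; RECURSE (op0) must be
   used instead so each level of recursion advances DEPTH.  The
   parenthesized function name inside RECURSE is what bypasses the
   poisoning macro.  */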
12690
12691 /* Return true if CODE or TYPE is known to be non-negative. */
12692
12693 static bool
12694 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12695 {
12696 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12697 && truth_value_p (code))
12698 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12699 have a signed:1 type (where the values are -1 and 0). */
12700 return true;
12701 return false;
12702 }
12703
12704 /* Return true if (CODE OP0) is known to be non-negative. If the return
12705 value is based on the assumption that signed overflow is undefined,
12706 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12707 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12708
12709 bool
12710 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12711 bool *strict_overflow_p, int depth)
12712 {
12713 if (TYPE_UNSIGNED (type))
12714 return true;
12715
12716 switch (code)
12717 {
12718 case ABS_EXPR:
12719 /* We can't return 1 if flag_wrapv is set because
12720 ABS_EXPR<INT_MIN> = INT_MIN. */
12721 if (!ANY_INTEGRAL_TYPE_P (type))
12722 return true;
12723 if (TYPE_OVERFLOW_UNDEFINED (type))
12724 {
12725 *strict_overflow_p = true;
12726 return true;
12727 }
12728 break;
12729
12730 case NON_LVALUE_EXPR:
12731 case FLOAT_EXPR:
12732 case FIX_TRUNC_EXPR:
12733 return RECURSE (op0);
12734
12735 CASE_CONVERT:
12736 {
12737 tree inner_type = TREE_TYPE (op0);
12738 tree outer_type = type;
12739
12740 if (TREE_CODE (outer_type) == REAL_TYPE)
12741 {
12742 if (TREE_CODE (inner_type) == REAL_TYPE)
12743 return RECURSE (op0);
12744 if (INTEGRAL_TYPE_P (inner_type))
12745 {
12746 if (TYPE_UNSIGNED (inner_type))
12747 return true;
12748 return RECURSE (op0);
12749 }
12750 }
12751 else if (INTEGRAL_TYPE_P (outer_type))
12752 {
12753 if (TREE_CODE (inner_type) == REAL_TYPE)
12754 return RECURSE (op0);
12755 if (INTEGRAL_TYPE_P (inner_type))
12756 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12757 && TYPE_UNSIGNED (inner_type);
12758 }
12759 }
12760 break;
12761
12762 default:
12763 return tree_simple_nonnegative_warnv_p (code, type);
12764 }
12765
12766 /* We don't know the sign of `t', so be conservative and return false. */
12767 return false;
12768 }
12769
12770 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12771 value is based on the assumption that signed overflow is undefined,
12772 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12773 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12774
12775 bool
12776 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12777 tree op1, bool *strict_overflow_p,
12778 int depth)
12779 {
12780 if (TYPE_UNSIGNED (type))
12781 return true;
12782
12783 switch (code)
12784 {
12785 case POINTER_PLUS_EXPR:
12786 case PLUS_EXPR:
12787 if (FLOAT_TYPE_P (type))
12788 return RECURSE (op0) && RECURSE (op1);
12789
12790 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12791 both unsigned and at least 2 bits shorter than the result. */
12792 if (TREE_CODE (type) == INTEGER_TYPE
12793 && TREE_CODE (op0) == NOP_EXPR
12794 && TREE_CODE (op1) == NOP_EXPR)
12795 {
12796 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12797 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12798 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12799 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12800 {
12801 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12802 TYPE_PRECISION (inner2)) + 1;
12803 return prec < TYPE_PRECISION (type);
12804 }
12805 }
12806 break;
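/* Example (illustrative, not from the GCC sources): with
   unsigned char a, b promoted to 32-bit int, (int) a + (int) b needs
   at most 9 bits, so PREC < 32 holds above and the sum is provably
   non-negative.  */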
12807
12808 case MULT_EXPR:
12809 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12810 {
12811 /* x * x is always non-negative for floating point x
12812 or when signed overflow is undefined. */
12813 if (operand_equal_p (op0, op1, 0)
12814 || (RECURSE (op0) && RECURSE (op1)))
12815 {
12816 if (ANY_INTEGRAL_TYPE_P (type)
12817 && TYPE_OVERFLOW_UNDEFINED (type))
12818 *strict_overflow_p = true;
12819 return true;
12820 }
12821 }
12822
12823 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12824 both unsigned and their total precision is less than that of the result. */
12825 if (TREE_CODE (type) == INTEGER_TYPE
12826 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12827 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12828 {
12829 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12830 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12831 : TREE_TYPE (op0);
12832 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12833 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12834 : TREE_TYPE (op1);
12835
12836 bool unsigned0 = TYPE_UNSIGNED (inner0);
12837 bool unsigned1 = TYPE_UNSIGNED (inner1);
12838
12839 if (TREE_CODE (op0) == INTEGER_CST)
12840 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12841
12842 if (TREE_CODE (op1) == INTEGER_CST)
12843 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12844
12845 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12846 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12847 {
12848 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12849 ? tree_int_cst_min_precision (op0, UNSIGNED)
12850 : TYPE_PRECISION (inner0);
12851
12852 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12853 ? tree_int_cst_min_precision (op1, UNSIGNED)
12854 : TYPE_PRECISION (inner1);
12855
12856 return precision0 + precision1 < TYPE_PRECISION (type);
12857 }
12858 }
12859 return false;
12860
12861 case BIT_AND_EXPR:
12862 case MAX_EXPR:
12863 return RECURSE (op0) || RECURSE (op1);
12864
12865 case BIT_IOR_EXPR:
12866 case BIT_XOR_EXPR:
12867 case MIN_EXPR:
12868 case RDIV_EXPR:
12869 case TRUNC_DIV_EXPR:
12870 case CEIL_DIV_EXPR:
12871 case FLOOR_DIV_EXPR:
12872 case ROUND_DIV_EXPR:
12873 return RECURSE (op0) && RECURSE (op1);
12874
12875 case TRUNC_MOD_EXPR:
12876 return RECURSE (op0);
12877
12878 case FLOOR_MOD_EXPR:
12879 return RECURSE (op1);
12880
12881 case CEIL_MOD_EXPR:
12882 case ROUND_MOD_EXPR:
12883 default:
12884 return tree_simple_nonnegative_warnv_p (code, type);
12885 }
12886
12887 /* We don't know the sign of `t', so be conservative and return false. */
12888 return false;
12889 }
12890
12891 /* Return true if T is known to be non-negative. If the return
12892 value is based on the assumption that signed overflow is undefined,
12893 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12894 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12895
12896 bool
12897 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12898 {
12899 if (TREE_CODE (t) == SSA_NAME
12900 && name_registered_for_update_p (t))
12901 return false;
12902
12903 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12904 return true;
12905
12906 switch (TREE_CODE (t))
12907 {
12908 case INTEGER_CST:
12909 return tree_int_cst_sgn (t) >= 0;
12910
12911 case REAL_CST:
12912 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12913
12914 case FIXED_CST:
12915 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12916
12917 case COND_EXPR:
12918 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12919
12920 case SSA_NAME:
12921 /* Limit the depth of recursion to avoid quadratic behavior.
12922 This is expected to catch almost all occurrences in practice.
12923 If this code misses important cases that unbounded recursion
12924 would not, passes that need this information could be revised
12925 to provide it through dataflow propagation. */
12926 if (depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH))
12927 return gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12928 strict_overflow_p, depth);
12929
12930 /* Fallthru. */
12931 default:
12932 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12933 }
12934 }
12935
12936 /* Return true if T is known to be non-negative. If the return
12937 value is based on the assumption that signed overflow is undefined,
12938 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12939 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12940
12941 bool
12942 tree_call_nonnegative_warnv_p (tree type, tree fndecl, tree arg0, tree arg1,
12943 bool *strict_overflow_p, int depth)
12944 {
12945 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12946 switch (DECL_FUNCTION_CODE (fndecl))
12947 {
12948 CASE_FLT_FN (BUILT_IN_ACOS):
12949 CASE_FLT_FN (BUILT_IN_ACOSH):
12950 CASE_FLT_FN (BUILT_IN_CABS):
12951 CASE_FLT_FN (BUILT_IN_COSH):
12952 CASE_FLT_FN (BUILT_IN_ERFC):
12953 CASE_FLT_FN (BUILT_IN_EXP):
12954 CASE_FLT_FN (BUILT_IN_EXP10):
12955 CASE_FLT_FN (BUILT_IN_EXP2):
12956 CASE_FLT_FN (BUILT_IN_FABS):
12957 CASE_FLT_FN (BUILT_IN_FDIM):
12958 CASE_FLT_FN (BUILT_IN_HYPOT):
12959 CASE_FLT_FN (BUILT_IN_POW10):
12960 CASE_INT_FN (BUILT_IN_FFS):
12961 CASE_INT_FN (BUILT_IN_PARITY):
12962 CASE_INT_FN (BUILT_IN_POPCOUNT):
12963 CASE_INT_FN (BUILT_IN_CLZ):
12964 CASE_INT_FN (BUILT_IN_CLRSB):
12965 case BUILT_IN_BSWAP32:
12966 case BUILT_IN_BSWAP64:
12967 /* Always true. */
12968 return true;
12969
12970 CASE_FLT_FN (BUILT_IN_SQRT):
12971 /* sqrt(-0.0) is -0.0. */
12972 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12973 return true;
12974 return RECURSE (arg0);
12975
12976 CASE_FLT_FN (BUILT_IN_ASINH):
12977 CASE_FLT_FN (BUILT_IN_ATAN):
12978 CASE_FLT_FN (BUILT_IN_ATANH):
12979 CASE_FLT_FN (BUILT_IN_CBRT):
12980 CASE_FLT_FN (BUILT_IN_CEIL):
12981 CASE_FLT_FN (BUILT_IN_ERF):
12982 CASE_FLT_FN (BUILT_IN_EXPM1):
12983 CASE_FLT_FN (BUILT_IN_FLOOR):
12984 CASE_FLT_FN (BUILT_IN_FMOD):
12985 CASE_FLT_FN (BUILT_IN_FREXP):
12986 CASE_FLT_FN (BUILT_IN_ICEIL):
12987 CASE_FLT_FN (BUILT_IN_IFLOOR):
12988 CASE_FLT_FN (BUILT_IN_IRINT):
12989 CASE_FLT_FN (BUILT_IN_IROUND):
12990 CASE_FLT_FN (BUILT_IN_LCEIL):
12991 CASE_FLT_FN (BUILT_IN_LDEXP):
12992 CASE_FLT_FN (BUILT_IN_LFLOOR):
12993 CASE_FLT_FN (BUILT_IN_LLCEIL):
12994 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12995 CASE_FLT_FN (BUILT_IN_LLRINT):
12996 CASE_FLT_FN (BUILT_IN_LLROUND):
12997 CASE_FLT_FN (BUILT_IN_LRINT):
12998 CASE_FLT_FN (BUILT_IN_LROUND):
12999 CASE_FLT_FN (BUILT_IN_MODF):
13000 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13001 CASE_FLT_FN (BUILT_IN_RINT):
13002 CASE_FLT_FN (BUILT_IN_ROUND):
13003 CASE_FLT_FN (BUILT_IN_SCALB):
13004 CASE_FLT_FN (BUILT_IN_SCALBLN):
13005 CASE_FLT_FN (BUILT_IN_SCALBN):
13006 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13007 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13008 CASE_FLT_FN (BUILT_IN_SINH):
13009 CASE_FLT_FN (BUILT_IN_TANH):
13010 CASE_FLT_FN (BUILT_IN_TRUNC):
13011 /* True if the 1st argument is nonnegative. */
13012 return RECURSE (arg0);
13013
13014 CASE_FLT_FN (BUILT_IN_FMAX):
13015 /* True if the 1st OR 2nd arguments are nonnegative. */
13016 return RECURSE (arg0) || RECURSE (arg1);
13017
13018 CASE_FLT_FN (BUILT_IN_FMIN):
13019 /* True if the 1st AND 2nd arguments are nonnegative. */
13020 return RECURSE (arg0) && RECURSE (arg1);
13021
13022 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13023 /* True if the 2nd argument is nonnegative. */
13024 return RECURSE (arg1);
13025
13026 CASE_FLT_FN (BUILT_IN_POWI):
13027 /* True if the 1st argument is nonnegative or the second
13028 argument is an even integer. */
13029 if (TREE_CODE (arg1) == INTEGER_CST
13030 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13031 return true;
13032 return RECURSE (arg0);
13033
13034 CASE_FLT_FN (BUILT_IN_POW):
13035 /* True if the 1st argument is nonnegative or the second
13036 argument is an even integer valued real. */
13037 if (TREE_CODE (arg1) == REAL_CST)
13038 {
13039 REAL_VALUE_TYPE c;
13040 HOST_WIDE_INT n;
13041
13042 c = TREE_REAL_CST (arg1);
13043 n = real_to_integer (&c);
13044 if ((n & 1) == 0)
13045 {
13046 REAL_VALUE_TYPE cint;
13047 real_from_integer (&cint, VOIDmode, n, SIGNED);
13048 if (real_identical (&c, &cint))
13049 return true;
13050 }
13051 }
13052 return RECURSE (arg0);
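/* Example (illustrative, not from the GCC sources): pow (x, 2.0) is
   known non-negative for any x because 2.0 is an even integer-valued
   real; pow (x, 2.5) is not provable this way, so the check falls back
   to the sign of the first argument.  */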
13053
13054 default:
13055 break;
13056 }
13057 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13058 }
13059
13060 /* Return true if T is known to be non-negative. If the return
13061 value is based on the assumption that signed overflow is undefined,
13062 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13063 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13064
13065 static bool
13066 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13067 {
13068 enum tree_code code = TREE_CODE (t);
13069 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13070 return true;
13071
13072 switch (code)
13073 {
13074 case TARGET_EXPR:
13075 {
13076 tree temp = TARGET_EXPR_SLOT (t);
13077 t = TARGET_EXPR_INITIAL (t);
13078
13079 /* If the initializer is non-void, then it's a normal expression
13080 that will be assigned to the slot. */
13081 if (!VOID_TYPE_P (t))
13082 return RECURSE (t);
13083
13084 /* Otherwise, the initializer sets the slot in some way. One common
13085 way is an assignment statement at the end of the initializer. */
13086 while (1)
13087 {
13088 if (TREE_CODE (t) == BIND_EXPR)
13089 t = expr_last (BIND_EXPR_BODY (t));
13090 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13091 || TREE_CODE (t) == TRY_CATCH_EXPR)
13092 t = expr_last (TREE_OPERAND (t, 0));
13093 else if (TREE_CODE (t) == STATEMENT_LIST)
13094 t = expr_last (t);
13095 else
13096 break;
13097 }
13098 if (TREE_CODE (t) == MODIFY_EXPR
13099 && TREE_OPERAND (t, 0) == temp)
13100 return RECURSE (TREE_OPERAND (t, 1));
13101
13102 return false;
13103 }
13104
13105 case CALL_EXPR:
13106 {
13107 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13108 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13109
13110 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13111 get_callee_fndecl (t),
13112 arg0,
13113 arg1,
13114 strict_overflow_p, depth);
13115 }
13116 case COMPOUND_EXPR:
13117 case MODIFY_EXPR:
13118 return RECURSE (TREE_OPERAND (t, 1));
13119
13120 case BIND_EXPR:
13121 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13122
13123 case SAVE_EXPR:
13124 return RECURSE (TREE_OPERAND (t, 0));
13125
13126 default:
13127 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13128 }
13129 }
13130
13131 #undef RECURSE
13132 #undef tree_expr_nonnegative_warnv_p
13133
13134 /* Return true if T is known to be non-negative. If the return
13135 value is based on the assumption that signed overflow is undefined,
13136 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13137 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13138
13139 bool
13140 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13141 {
13142 enum tree_code code;
13143 if (t == error_mark_node)
13144 return false;
13145
13146 code = TREE_CODE (t);
13147 switch (TREE_CODE_CLASS (code))
13148 {
13149 case tcc_binary:
13150 case tcc_comparison:
13151 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13152 TREE_TYPE (t),
13153 TREE_OPERAND (t, 0),
13154 TREE_OPERAND (t, 1),
13155 strict_overflow_p, depth);
13156
13157 case tcc_unary:
13158 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13159 TREE_TYPE (t),
13160 TREE_OPERAND (t, 0),
13161 strict_overflow_p, depth);
13162
13163 case tcc_constant:
13164 case tcc_declaration:
13165 case tcc_reference:
13166 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13167
13168 default:
13169 break;
13170 }
13171
13172 switch (code)
13173 {
13174 case TRUTH_AND_EXPR:
13175 case TRUTH_OR_EXPR:
13176 case TRUTH_XOR_EXPR:
13177 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13178 TREE_TYPE (t),
13179 TREE_OPERAND (t, 0),
13180 TREE_OPERAND (t, 1),
13181 strict_overflow_p, depth);
13182 case TRUTH_NOT_EXPR:
13183 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13184 TREE_TYPE (t),
13185 TREE_OPERAND (t, 0),
13186 strict_overflow_p, depth);
13187
13188 case COND_EXPR:
13189 case CONSTRUCTOR:
13190 case OBJ_TYPE_REF:
13191 case ASSERT_EXPR:
13192 case ADDR_EXPR:
13193 case WITH_SIZE_EXPR:
13194 case SSA_NAME:
13195 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13196
13197 default:
13198 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13199 }
13200 }
13201
13202 /* Return true if `t' is known to be non-negative. Handle warnings
13203 about undefined signed overflow. */
13204
13205 bool
13206 tree_expr_nonnegative_p (tree t)
13207 {
13208 bool ret, strict_overflow_p;
13209
13210 strict_overflow_p = false;
13211 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13212 if (strict_overflow_p)
13213 fold_overflow_warning (("assuming signed overflow does not occur when "
13214 "determining that expression is always "
13215 "non-negative"),
13216 WARN_STRICT_OVERFLOW_MISC);
13217 return ret;
13218 }
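/* Example (illustrative, not from the GCC sources): for unsigned int U,
   tree_expr_nonnegative_p is trivially true of U / 2; for signed int I,
   I * I is non-negative only under the assumption that signed overflow
   is undefined, in which case the call above also issues the
   -Wstrict-overflow note.  */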
13219
13220
13221 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13222 For floating point we further ensure that T is not denormal.
13223 Similar logic is present in nonzero_address_p in rtlanal.c.
13224
13225 If the return value is based on the assumption that signed overflow
13226 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13227 change *STRICT_OVERFLOW_P. */
13228
13229 bool
13230 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13231 bool *strict_overflow_p)
13232 {
13233 switch (code)
13234 {
13235 case ABS_EXPR:
13236 return tree_expr_nonzero_warnv_p (op0,
13237 strict_overflow_p);
13238
13239 case NOP_EXPR:
13240 {
13241 tree inner_type = TREE_TYPE (op0);
13242 tree outer_type = type;
13243
13244 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13245 && tree_expr_nonzero_warnv_p (op0,
13246 strict_overflow_p));
13247 }
13248 break;
13249
13250 case NON_LVALUE_EXPR:
13251 return tree_expr_nonzero_warnv_p (op0,
13252 strict_overflow_p);
13253
13254 default:
13255 break;
13256 }
13257
13258 return false;
13259 }
13260
13261 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13262 For floating point we further ensure that T is not denormal.
13263 Similar logic is present in nonzero_address_p in rtlanal.c.
13264
13265 If the return value is based on the assumption that signed overflow
13266 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13267 change *STRICT_OVERFLOW_P. */
13268
13269 bool
13270 tree_binary_nonzero_warnv_p (enum tree_code code,
13271 tree type,
13272 tree op0,
13273 tree op1, bool *strict_overflow_p)
13274 {
13275 bool sub_strict_overflow_p;
13276 switch (code)
13277 {
13278 case POINTER_PLUS_EXPR:
13279 case PLUS_EXPR:
13280 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13281 {
13282 /* In the presence of negative values it is hard
13283 to say anything definite. */
13284 sub_strict_overflow_p = false;
13285 if (!tree_expr_nonnegative_warnv_p (op0,
13286 &sub_strict_overflow_p)
13287 || !tree_expr_nonnegative_warnv_p (op1,
13288 &sub_strict_overflow_p))
13289 return false;
13290 /* One of the operands must be positive and the other non-negative. */
13291 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13292 overflows, on a twos-complement machine the sum of two
13293 nonnegative numbers can never be zero. */
13294 return (tree_expr_nonzero_warnv_p (op0,
13295 strict_overflow_p)
13296 || tree_expr_nonzero_warnv_p (op1,
13297 strict_overflow_p));
13298 }
13299 break;
13300
13301 case MULT_EXPR:
13302 if (TYPE_OVERFLOW_UNDEFINED (type))
13303 {
13304 if (tree_expr_nonzero_warnv_p (op0,
13305 strict_overflow_p)
13306 && tree_expr_nonzero_warnv_p (op1,
13307 strict_overflow_p))
13308 {
13309 *strict_overflow_p = true;
13310 return true;
13311 }
13312 }
13313 break;
13314
13315 case MIN_EXPR:
13316 sub_strict_overflow_p = false;
13317 if (tree_expr_nonzero_warnv_p (op0,
13318 &sub_strict_overflow_p)
13319 && tree_expr_nonzero_warnv_p (op1,
13320 &sub_strict_overflow_p))
13321 {
13322 if (sub_strict_overflow_p)
13323 *strict_overflow_p = true;
13324 }
13325 break;
13326
13327 case MAX_EXPR:
13328 sub_strict_overflow_p = false;
13329 if (tree_expr_nonzero_warnv_p (op0,
13330 &sub_strict_overflow_p))
13331 {
13332 if (sub_strict_overflow_p)
13333 *strict_overflow_p = true;
13334
13335 /* When both operands are nonzero, then MAX must be too. */
13336 if (tree_expr_nonzero_warnv_p (op1,
13337 strict_overflow_p))
13338 return true;
13339
13340 /* MAX where operand 0 is positive is positive. */
13341 return tree_expr_nonnegative_warnv_p (op0,
13342 strict_overflow_p);
13343 }
13344 /* MAX where operand 1 is positive is positive. */
13345 else if (tree_expr_nonzero_warnv_p (op1,
13346 &sub_strict_overflow_p)
13347 && tree_expr_nonnegative_warnv_p (op1,
13348 &sub_strict_overflow_p))
13349 {
13350 if (sub_strict_overflow_p)
13351 *strict_overflow_p = true;
13352 return true;
13353 }
13354 break;
13355
13356 case BIT_IOR_EXPR:
13357 return (tree_expr_nonzero_warnv_p (op1,
13358 strict_overflow_p)
13359 || tree_expr_nonzero_warnv_p (op0,
13360 strict_overflow_p));
13361
13362 default:
13363 break;
13364 }
13365
13366 return false;
13367 }
13368
13369 /* Return true when T is an address and is known to be nonzero.
13370 For floating point we further ensure that T is not denormal.
13371 Similar logic is present in nonzero_address_p in rtlanal.c.
13372
13373 If the return value is based on the assumption that signed overflow
13374 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13375 change *STRICT_OVERFLOW_P. */
13376
13377 bool
13378 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13379 {
13380 bool sub_strict_overflow_p;
13381 switch (TREE_CODE (t))
13382 {
13383 case INTEGER_CST:
13384 return !integer_zerop (t);
13385
13386 case ADDR_EXPR:
13387 {
13388 tree base = TREE_OPERAND (t, 0);
13389
13390 if (!DECL_P (base))
13391 base = get_base_address (base);
13392
13393 if (!base)
13394 return false;
13395
13396 /* For objects in the symbol table, check whether we know they are
13397 non-zero. Don't do anything for variables and functions before the
13398 symtab is built; it is quite possible that they will be declared weak later. */
13399 if (DECL_P (base) && decl_in_symtab_p (base))
13400 {
13401 struct symtab_node *symbol;
13402
13403 symbol = symtab_node::get_create (base);
13404 if (symbol)
13405 return symbol->nonzero_address ();
13406 else
13407 return false;
13408 }
13409
13410 /* Function local objects are never NULL. */
13411 if (DECL_P (base)
13412 && (DECL_CONTEXT (base)
13413 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13414 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13415 return true;
13416
13417 /* Constants are never weak. */
13418 if (CONSTANT_CLASS_P (base))
13419 return true;
13420
13421 return false;
13422 }
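/* Example (illustrative, not from the GCC sources): &LOCAL, where LOCAL
   is an auto variable of the current function, is known non-NULL by the
   rule above, while the address of a global that might later be declared
   weak is left to the symbol table's nonzero_address query.  */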
13423
13424 case COND_EXPR:
13425 sub_strict_overflow_p = false;
13426 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13427 &sub_strict_overflow_p)
13428 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13429 &sub_strict_overflow_p))
13430 {
13431 if (sub_strict_overflow_p)
13432 *strict_overflow_p = true;
13433 return true;
13434 }
13435 break;
13436
13437 default:
13438 break;
13439 }
13440 return false;
13441 }
13442
13443 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13444 attempt to fold the expression to a constant without modifying TYPE,
13445 OP0 or OP1.
13446
13447 If the expression could be simplified to a constant, then return
13448 the constant. If the expression cannot be simplified to a
13449 constant, then return NULL_TREE. */
13450
13451 tree
13452 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13453 {
13454 tree tem = fold_binary (code, type, op0, op1);
13455 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13456 }
13457
13458 /* Given the components of a unary expression CODE, TYPE and OP0,
13459 attempt to fold the expression to a constant without modifying
13460 TYPE or OP0.
13461
13462 If the expression could be simplified to a constant, then return
13463 the constant. If the expression cannot be simplified to a
13464 constant, then return NULL_TREE. */
13465
13466 tree
13467 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13468 {
13469 tree tem = fold_unary (code, type, op0);
13470 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13471 }
13472
13473 /* If EXP represents referencing an element in a constant string
13474 (either via pointer arithmetic or array indexing), return the
13475 tree representing the value accessed; otherwise return NULL. */
13476
13477 tree
13478 fold_read_from_constant_string (tree exp)
13479 {
13480 if ((TREE_CODE (exp) == INDIRECT_REF
13481 || TREE_CODE (exp) == ARRAY_REF)
13482 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13483 {
13484 tree exp1 = TREE_OPERAND (exp, 0);
13485 tree index;
13486 tree string;
13487 location_t loc = EXPR_LOCATION (exp);
13488
13489 if (TREE_CODE (exp) == INDIRECT_REF)
13490 string = string_constant (exp1, &index);
13491 else
13492 {
13493 tree low_bound = array_ref_low_bound (exp);
13494 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13495
13496 /* Optimize the special case of a zero lower bound.
13497
13498 We convert the low_bound to sizetype to avoid some problems
13499 with constant folding. (E.g. suppose the lower bound is 1,
13500 and its mode is QI. Without the conversion, (ARRAY
13501 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13502 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13503 if (! integer_zerop (low_bound))
13504 index = size_diffop_loc (loc, index,
13505 fold_convert_loc (loc, sizetype, low_bound));
13506
13507 string = exp1;
13508 }
13509
13510 if (string
13511 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13512 && TREE_CODE (string) == STRING_CST
13513 && TREE_CODE (index) == INTEGER_CST
13514 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13515 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13516 == MODE_INT)
13517 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13518 return build_int_cst_type (TREE_TYPE (exp),
13519 (TREE_STRING_POINTER (string)
13520 [TREE_INT_CST_LOW (index)]));
13521 }
13522 return NULL;
13523 }
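/* Example (illustrative, not from the GCC sources): for the C expression
   "abc"[1], EXP is an ARRAY_REF of a STRING_CST with index 1, and
   fold_read_from_constant_string returns the INTEGER_CST 98 ('b').  */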
13524
13525 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13526 an integer constant, real, or fixed-point constant.
13527
13528 TYPE is the type of the result. */
13529
13530 static tree
13531 fold_negate_const (tree arg0, tree type)
13532 {
13533 tree t = NULL_TREE;
13534
13535 switch (TREE_CODE (arg0))
13536 {
13537 case INTEGER_CST:
13538 {
13539 bool overflow;
13540 wide_int val = wi::neg (arg0, &overflow);
13541 t = force_fit_type (type, val, 1,
13542 (overflow | TREE_OVERFLOW (arg0))
13543 && !TYPE_UNSIGNED (type));
13544 break;
13545 }
13546
13547 case REAL_CST:
13548 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13549 break;
13550
13551 case FIXED_CST:
13552 {
13553 FIXED_VALUE_TYPE f;
13554 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13555 &(TREE_FIXED_CST (arg0)), NULL,
13556 TYPE_SATURATING (type));
13557 t = build_fixed (type, f);
13558 /* Propagate overflow flags. */
13559 if (overflow_p | TREE_OVERFLOW (arg0))
13560 TREE_OVERFLOW (t) = 1;
13561 break;
13562 }
13563
13564 default:
13565 gcc_unreachable ();
13566 }
13567
13568 return t;
13569 }
13570
13571 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13572 an integer constant or real constant.
13573
13574 TYPE is the type of the result. */
13575
13576 tree
13577 fold_abs_const (tree arg0, tree type)
13578 {
13579 tree t = NULL_TREE;
13580
13581 switch (TREE_CODE (arg0))
13582 {
13583 case INTEGER_CST:
13584 {
13585 /* If the value is unsigned or non-negative, then the absolute value
13586 is the same as the ordinary value. */
13587 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13588 t = arg0;
13589
13590 /* If the value is negative, then the absolute value is
13591 its negation. */
13592 else
13593 {
13594 bool overflow;
13595 wide_int val = wi::neg (arg0, &overflow);
13596 t = force_fit_type (type, val, -1,
13597 overflow | TREE_OVERFLOW (arg0));
13598 }
13599 }
13600 break;
13601
13602 case REAL_CST:
13603 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13604 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13605 else
13606 t = arg0;
13607 break;
13608
13609 default:
13610 gcc_unreachable ();
13611 }
13612
13613 return t;
13614 }
13615
13616 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13617 constant. TYPE is the type of the result. */
13618
13619 static tree
13620 fold_not_const (const_tree arg0, tree type)
13621 {
13622 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13623
13624 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13625 }
13626
13627 /* Given CODE, a relational operator, the target type TYPE, and two
13628 constant operands OP0 and OP1, return the result of the
13629 relational operation. If the result is not a compile time
13630 constant, then return NULL_TREE. */
13631
13632 static tree
13633 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13634 {
13635 int result, invert;
13636
13637 /* From here on, the only cases we handle are when the result is
13638 known to be a constant. */
13639
13640 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13641 {
13642 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13643 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13644
13645 /* Handle the cases where either operand is a NaN. */
13646 if (real_isnan (c0) || real_isnan (c1))
13647 {
13648 switch (code)
13649 {
13650 case EQ_EXPR:
13651 case ORDERED_EXPR:
13652 result = 0;
13653 break;
13654
13655 case NE_EXPR:
13656 case UNORDERED_EXPR:
13657 case UNLT_EXPR:
13658 case UNLE_EXPR:
13659 case UNGT_EXPR:
13660 case UNGE_EXPR:
13661 case UNEQ_EXPR:
13662 result = 1;
13663 break;
13664
13665 case LT_EXPR:
13666 case LE_EXPR:
13667 case GT_EXPR:
13668 case GE_EXPR:
13669 case LTGT_EXPR:
13670 if (flag_trapping_math)
13671 return NULL_TREE;
13672 result = 0;
13673 break;
13674
13675 default:
13676 gcc_unreachable ();
13677 }
13678
13679 return constant_boolean_node (result, type);
13680 }
13681
13682 return constant_boolean_node (real_compare (code, c0, c1), type);
13683 }
13684
13685 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13686 {
13687 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13688 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13689 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13690 }
13691
13692 /* Handle equality/inequality of complex constants. */
13693 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13694 {
13695 tree rcond = fold_relational_const (code, type,
13696 TREE_REALPART (op0),
13697 TREE_REALPART (op1));
13698 tree icond = fold_relational_const (code, type,
13699 TREE_IMAGPART (op0),
13700 TREE_IMAGPART (op1));
13701 if (code == EQ_EXPR)
13702 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13703 else if (code == NE_EXPR)
13704 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13705 else
13706 return NULL_TREE;
13707 }
13708
13709 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13710 {
13711 unsigned count = VECTOR_CST_NELTS (op0);
13712 tree *elts = XALLOCAVEC (tree, count);
13713 gcc_assert (VECTOR_CST_NELTS (op1) == count
13714 && TYPE_VECTOR_SUBPARTS (type) == count);
13715
13716 for (unsigned i = 0; i < count; i++)
13717 {
13718 tree elem_type = TREE_TYPE (type);
13719 tree elem0 = VECTOR_CST_ELT (op0, i);
13720 tree elem1 = VECTOR_CST_ELT (op1, i);
13721
13722 tree tem = fold_relational_const (code, elem_type,
13723 elem0, elem1);
13724
13725 if (tem == NULL_TREE)
13726 return NULL_TREE;
13727
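	  /* A folded vector comparison yields an element-wise mask:
	     all-ones (-1) where the comparison holds, zero where it
	     does not.  */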
13728 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13729 }
13730
13731 return build_vector (type, elts);
13732 }
13733
13734 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13735
13736 To compute GT, swap the arguments and do LT.
13737 To compute GE, do LT and invert the result.
13738 To compute LE, swap the arguments, do LT and invert the result.
13739 To compute NE, do EQ and invert the result.
13740
13741 Therefore, the code below must handle only EQ and LT. */
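/* For example, 5 >= 3 is computed as !(5 < 3): GE_EXPR sets INVERT and
   becomes LT_EXPR, tree_int_cst_lt yields 0, and the final inversion
   gives 1.  */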
13742
13743 if (code == LE_EXPR || code == GT_EXPR)
13744 {
13745 std::swap (op0, op1);
13746 code = swap_tree_comparison (code);
13747 }
13748
13749 /* Note that it is safe to invert for real values here because we
13750 have already handled the one case where it matters. */
13751
13752 invert = 0;
13753 if (code == NE_EXPR || code == GE_EXPR)
13754 {
13755 invert = 1;
13756 code = invert_tree_comparison (code, false);
13757 }
13758
13759 /* Compute a result for LT or EQ if the operands permit;
13760 otherwise return NULL_TREE. */
13761 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13762 {
13763 if (code == EQ_EXPR)
13764 result = tree_int_cst_equal (op0, op1);
13765 else
13766 result = tree_int_cst_lt (op0, op1);
13767 }
13768 else
13769 return NULL_TREE;
13770
13771 if (invert)
13772 result ^= 1;
13773 return constant_boolean_node (result, type);
13774 }
13775
13776 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13777 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13778 itself. */
13779
13780 tree
13781 fold_build_cleanup_point_expr (tree type, tree expr)
13782 {
13783 /* If the expression does not have side effects then we don't have to wrap
13784 it with a cleanup point expression. */
13785 if (!TREE_SIDE_EFFECTS (expr))
13786 return expr;
13787
13788 /* If the expression is a return, check whether the expression inside
13789 the return, or the right-hand side of the modify expression inside
13790 the return, is free of side effects. If either is, we don't need to
13791 wrap the expression in a cleanup point expression. We don't check the
13792 left-hand side of the modify because it should always be the return decl. */
13793 if (TREE_CODE (expr) == RETURN_EXPR)
13794 {
13795 tree op = TREE_OPERAND (expr, 0);
13796 if (!op || !TREE_SIDE_EFFECTS (op))
13797 return expr;
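      /* OP is now the MODIFY_EXPR that stores into the return decl; look
	 at the side effects of its right-hand side.  */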
13798 op = TREE_OPERAND (op, 1);
13799 if (!TREE_SIDE_EFFECTS (op))
13800 return expr;
13801 }
13802
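  /* For example, a return that just copies a variable into the return
     decl needs no cleanup point, while one that stores the result of a
     call does, since the call may create temporaries whose cleanups must
     run at the end of the full expression.  */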
13803 return build1 (CLEANUP_POINT_EXPR, type, expr);
13804 }
13805
13806 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13807 of an indirection through OP0, or NULL_TREE if no simplification is
13808 possible. */
13809
13810 tree
13811 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13812 {
13813 tree sub = op0;
13814 tree subtype;
13815
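  /* Look through conversions that do not change the pointer value.  */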
13816 STRIP_NOPS (sub);
13817 subtype = TREE_TYPE (sub);
13818 if (!POINTER_TYPE_P (subtype))
13819 return NULL_TREE;
13820
13821 if (TREE_CODE (sub) == ADDR_EXPR)
13822 {
13823 tree op = TREE_OPERAND (sub, 0);
13824 tree optype = TREE_TYPE (op);
13825 /* *&CONST_DECL -> to the value of the const decl. */
13826 if (TREE_CODE (op) == CONST_DECL)
13827 return DECL_INITIAL (op);
13828 /* *&p => p; make sure to handle *&"str"[cst] here. */
13829 if (type == optype)
13830 {
13831 tree fop = fold_read_from_constant_string (op);
13832 if (fop)
13833 return fop;
13834 else
13835 return op;
13836 }
13837 /* *(foo *)&fooarray => fooarray[0] */
13838 else if (TREE_CODE (optype) == ARRAY_TYPE
13839 && type == TREE_TYPE (optype)
13840 && (!in_gimple_form
13841 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13842 {
13843 tree type_domain = TYPE_DOMAIN (optype);
13844 tree min_val = size_zero_node;
13845 if (type_domain && TYPE_MIN_VALUE (type_domain))
13846 min_val = TYPE_MIN_VALUE (type_domain);
13847 if (in_gimple_form
13848 && TREE_CODE (min_val) != INTEGER_CST)
13849 return NULL_TREE;
13850 return build4_loc (loc, ARRAY_REF, type, op, min_val,
13851 NULL_TREE, NULL_TREE);
13852 }
13853 /* *(foo *)&complexfoo => __real__ complexfoo */
13854 else if (TREE_CODE (optype) == COMPLEX_TYPE
13855 && type == TREE_TYPE (optype))
13856 return fold_build1_loc (loc, REALPART_EXPR, type, op);
13857 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13858 else if (TREE_CODE (optype) == VECTOR_TYPE
13859 && type == TREE_TYPE (optype))
13860 {
13861 tree part_width = TYPE_SIZE (type);
13862 tree index = bitsize_int (0);
13863 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
13864 }
13865 }
13866
13867 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
13868 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13869 {
13870 tree op00 = TREE_OPERAND (sub, 0);
13871 tree op01 = TREE_OPERAND (sub, 1);
13872
13873 STRIP_NOPS (op00);
13874 if (TREE_CODE (op00) == ADDR_EXPR)
13875 {
13876 tree op00type;
13877 op00 = TREE_OPERAND (op00, 0);
13878 op00type = TREE_TYPE (op00);
13879
13880 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
13881 if (TREE_CODE (op00type) == VECTOR_TYPE
13882 && type == TREE_TYPE (op00type))
13883 {
13884 HOST_WIDE_INT offset = tree_to_shwi (op01);
13885 tree part_width = TYPE_SIZE (type);
13886 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
13887 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
13888 tree index = bitsize_int (indexi);
13889
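	      /* Fold only when the constant byte offset falls within the
		 bounds of the vector.  */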
13890 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
13891 return fold_build3_loc (loc,
13892 BIT_FIELD_REF, type, op00,
13893 part_width, index);
13894
13895 }
13896 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13897 else if (TREE_CODE (op00type) == COMPLEX_TYPE
13898 && type == TREE_TYPE (op00type))
13899 {
13900 tree size = TYPE_SIZE_UNIT (type);
13901 if (tree_int_cst_equal (size, op01))
13902 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
13903 }
13904 /* ((foo *)&fooarray)[1] => fooarray[1] */
13905 else if (TREE_CODE (op00type) == ARRAY_TYPE
13906 && type == TREE_TYPE (op00type))
13907 {
13908 tree type_domain = TYPE_DOMAIN (op00type);
13909 tree min_val = size_zero_node;
13910 if (type_domain && TYPE_MIN_VALUE (type_domain))
13911 min_val = TYPE_MIN_VALUE (type_domain);
13912 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
13913 TYPE_SIZE_UNIT (type));
13914 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
13915 return build4_loc (loc, ARRAY_REF, type, op00, op01,
13916 NULL_TREE, NULL_TREE);
13917 }
13918 }
13919 }
13920
13921 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13922 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13923 && type == TREE_TYPE (TREE_TYPE (subtype))
13924 && (!in_gimple_form
13925 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13926 {
13927 tree type_domain;
13928 tree min_val = size_zero_node;
13929 sub = build_fold_indirect_ref_loc (loc, sub);
13930 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13931 if (type_domain && TYPE_MIN_VALUE (type_domain))
13932 min_val = TYPE_MIN_VALUE (type_domain);
13933 if (in_gimple_form
13934 && TREE_CODE (min_val) != INTEGER_CST)
13935 return NULL_TREE;
13936 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
13937 NULL_TREE);
13938 }
13939
13940 return NULL_TREE;
13941 }
13942
13943 /* Builds an expression for an indirection through T, simplifying some
13944 cases. */
13945
13946 tree
13947 build_fold_indirect_ref_loc (location_t loc, tree t)
13948 {
13949 tree type = TREE_TYPE (TREE_TYPE (t));
13950 tree sub = fold_indirect_ref_1 (loc, type, t);
13951
13952 if (sub)
13953 return sub;
13954
13955 return build1_loc (loc, INDIRECT_REF, type, t);
13956 }
13957
13958 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13959
13960 tree
13961 fold_indirect_ref_loc (location_t loc, tree t)
13962 {
13963 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
13964
13965 if (sub)
13966 return sub;
13967 else
13968 return t;
13969 }
13970
13971 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13972 whose result is ignored. The type of the returned tree need not be
13973 the same as the original expression. */
13974
13975 tree
13976 fold_ignored_result (tree t)
13977 {
13978 if (!TREE_SIDE_EFFECTS (t))
13979 return integer_zero_node;
13980
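  /* Peel side-effect-free wrappers off T until only the side-effecting
     core remains.  For example, if the result of (f () + 3) is ignored,
     the loop below reduces it to just f ().  */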
13981 for (;;)
13982 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13983 {
13984 case tcc_unary:
13985 t = TREE_OPERAND (t, 0);
13986 break;
13987
13988 case tcc_binary:
13989 case tcc_comparison:
13990 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13991 t = TREE_OPERAND (t, 0);
13992 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13993 t = TREE_OPERAND (t, 1);
13994 else
13995 return t;
13996 break;
13997
13998 case tcc_expression:
13999 switch (TREE_CODE (t))
14000 {
14001 case COMPOUND_EXPR:
14002 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14003 return t;
14004 t = TREE_OPERAND (t, 0);
14005 break;
14006
14007 case COND_EXPR:
14008 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14009 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14010 return t;
14011 t = TREE_OPERAND (t, 0);
14012 break;
14013
14014 default:
14015 return t;
14016 }
14017 break;
14018
14019 default:
14020 return t;
14021 }
14022 }
14023
14024 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14025
14026 tree
14027 round_up_loc (location_t loc, tree value, unsigned int divisor)
14028 {
14029 tree div = NULL_TREE;
14030
14031 if (divisor == 1)
14032 return value;
14033
14034 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14035 have to do anything. Only do this when VALUE is not a constant,
14036 because for a constant the check is more expensive than simply
14037 performing the rounding. */
14038 if (TREE_CODE (value) != INTEGER_CST)
14039 {
14040 div = build_int_cst (TREE_TYPE (value), divisor);
14041
14042 if (multiple_of_p (TREE_TYPE (value), value, div))
14043 return value;
14044 }
14045
14046 /* If divisor is a power of two, simplify this to bit manipulation. */
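  /* Rounding up to a power of two is (VALUE + DIVISOR - 1) & -DIVISOR.
     For example, rounding 13 up to a multiple of 8 gives
     (13 + 7) & -8 == 20 & ~7 == 16.  */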
14047 if (divisor == (divisor & -divisor))
14048 {
14049 if (TREE_CODE (value) == INTEGER_CST)
14050 {
14051 wide_int val = value;
14052 bool overflow_p;
14053
14054 if ((val & (divisor - 1)) == 0)
14055 return value;
14056
14057 overflow_p = TREE_OVERFLOW (value);
14058 val += divisor - 1;
14059 val &= - (int) divisor;
14060 if (val == 0)
14061 overflow_p = true;
14062
14063 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14064 }
14065 else
14066 {
14067 tree t;
14068
14069 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14070 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14071 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14072 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14073 }
14074 }
14075 else
14076 {
14077 if (!div)
14078 div = build_int_cst (TREE_TYPE (value), divisor);
14079 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14080 value = size_binop_loc (loc, MULT_EXPR, value, div);
14081 }
14082
14083 return value;
14084 }
14085
14086 /* Likewise, but round down. */
14087
14088 tree
14089 round_down_loc (location_t loc, tree value, int divisor)
14090 {
14091 tree div = NULL_TREE;
14092
14093 gcc_assert (divisor > 0);
14094 if (divisor == 1)
14095 return value;
14096
14097 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14098 have to do anything. Only do this when VALUE is not a constant,
14099 because for a constant the check is more expensive than simply
14100 performing the rounding. */
14101 if (TREE_CODE (value) != INTEGER_CST)
14102 {
14103 div = build_int_cst (TREE_TYPE (value), divisor);
14104
14105 if (multiple_of_p (TREE_TYPE (value), value, div))
14106 return value;
14107 }
14108
14109 /* If divisor is a power of two, simplify this to bit manipulation. */
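  /* Rounding down to a power of two is simply VALUE & -DIVISOR.
     For example, rounding 13 down to a multiple of 8 gives
     13 & -8 == 8.  */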
14110 if (divisor == (divisor & -divisor))
14111 {
14112 tree t;
14113
14114 t = build_int_cst (TREE_TYPE (value), -divisor);
14115 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14116 }
14117 else
14118 {
14119 if (!div)
14120 div = build_int_cst (TREE_TYPE (value), divisor);
14121 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14122 value = size_binop_loc (loc, MULT_EXPR, value, div);
14123 }
14124
14125 return value;
14126 }
14127
14128 /* Returns a pointer to the base of the object addressed by EXP and
14129 extracts the information about the offset of the access, storing it
14130 in *PBITPOS and *POFFSET. */
14131
14132 static tree
14133 split_address_to_core_and_offset (tree exp,
14134 HOST_WIDE_INT *pbitpos, tree *poffset)
14135 {
14136 tree core;
14137 machine_mode mode;
14138 int unsignedp, volatilep;
14139 HOST_WIDE_INT bitsize;
14140 location_t loc = EXPR_LOCATION (exp);
14141
14142 if (TREE_CODE (exp) == ADDR_EXPR)
14143 {
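      /* Decompose the reference into its base object plus bit and tree
	 offsets, then re-take the address of the base as the core.  */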
14144 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14145 poffset, &mode, &unsignedp, &volatilep,
14146 false);
14147 core = build_fold_addr_expr_loc (loc, core);
14148 }
14149 else
14150 {
14151 core = exp;
14152 *pbitpos = 0;
14153 *poffset = NULL_TREE;
14154 }
14155
14156 return core;
14157 }
14158
14159 /* Returns true if addresses of E1 and E2 differ by a constant, false
14160 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14161
14162 bool
14163 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14164 {
14165 tree core1, core2;
14166 HOST_WIDE_INT bitpos1, bitpos2;
14167 tree toffset1, toffset2, tdiff, type;
14168
14169 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14170 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14171
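  /* The difference is a compile-time constant only if both addresses are
     based on the same core object and the bit offsets are whole bytes.
     For example, with 4-byte ints, &A[3] and &A[1] share the core &A and
     differ by 64 bits, so *DIFF becomes 8.  */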
14172 if (bitpos1 % BITS_PER_UNIT != 0
14173 || bitpos2 % BITS_PER_UNIT != 0
14174 || !operand_equal_p (core1, core2, 0))
14175 return false;
14176
14177 if (toffset1 && toffset2)
14178 {
14179 type = TREE_TYPE (toffset1);
14180 if (type != TREE_TYPE (toffset2))
14181 toffset2 = fold_convert (type, toffset2);
14182
14183 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14184 if (!cst_and_fits_in_hwi (tdiff))
14185 return false;
14186
14187 *diff = int_cst_value (tdiff);
14188 }
14189 else if (toffset1 || toffset2)
14190 {
14191 /* If only one of the offsets is non-constant, the difference cannot
14192 be a constant. */
14193 return false;
14194 }
14195 else
14196 *diff = 0;
14197
14198 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14199 return true;
14200 }
14201
14202 /* Return OFF converted to a pointer offset type suitable as offset for
14203 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14204 tree
14205 convert_to_ptrofftype_loc (location_t loc, tree off)
14206 {
14207 return fold_convert_loc (loc, sizetype, off);
14208 }
14209
14210 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14211 tree
14212 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14213 {
14214 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14215 ptr, convert_to_ptrofftype_loc (loc, off));
14216 }
14217
14218 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14219 tree
14220 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14221 {
14222 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14223 ptr, size_int (off));
14224 }