1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
   26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide and size_binop.
31
32 fold takes a tree as argument and returns a simplified tree.
33
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
37
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
40
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "realmpfr.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "target.h"
56 #include "diagnostic-core.h"
57 #include "intl.h"
58 #include "ggc.h"
59 #include "hashtab.h"
60 #include "langhooks.h"
61 #include "md5.h"
62 #include "gimple.h"
63 #include "tree-flow.h"
64
65 /* Nonzero if we are folding constants inside an initializer; zero
66 otherwise. */
67 int folding_initializer = 0;
68
   69 /* The following constants represent a bit-based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code {
73 COMPCODE_FALSE = 0,
74 COMPCODE_LT = 1,
75 COMPCODE_EQ = 2,
76 COMPCODE_LE = 3,
77 COMPCODE_GT = 4,
78 COMPCODE_LTGT = 5,
79 COMPCODE_GE = 6,
80 COMPCODE_ORD = 7,
81 COMPCODE_UNORD = 8,
82 COMPCODE_UNLT = 9,
83 COMPCODE_UNEQ = 10,
84 COMPCODE_UNLE = 11,
85 COMPCODE_UNGT = 12,
86 COMPCODE_NE = 13,
87 COMPCODE_UNGE = 14,
88 COMPCODE_TRUE = 15
89 };
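
/* Illustration (editor's note, not part of the original source): the low
   three bits encode the ordered outcomes LT, EQ and GT, and bit 3 encodes
   "unordered", so each compound code is the bitwise IOR of its primitive
   outcomes:

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)                   == 3
     COMPCODE_NE   == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)  == 13
     COMPCODE_UNGE == (COMPCODE_UNORD | COMPCODE_GE)                == 14

   ANDing or ORing two comparisons of the same operands therefore reduces
   to bitwise AND/IOR of their codes, which is what the helpers
   comparison_to_compcode and compcode_to_comparison declared below make
   usable. */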
90
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
107 tree, tree, tree);
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
109 HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
111 tree *, tree *);
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static bool simple_operand_p_2 (tree);
116 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
117 static tree range_predecessor (tree);
118 static tree range_successor (tree);
119 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
120 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
121 static tree unextend (tree, int, int, tree);
122 static tree optimize_minmax_comparison (location_t, enum tree_code,
123 tree, tree, tree);
124 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
125 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
126 static tree fold_binary_op_with_conditional_arg (location_t,
127 enum tree_code, tree,
128 tree, tree,
129 tree, tree, int);
130 static tree fold_mathfn_compare (location_t,
131 enum built_in_function, enum tree_code,
132 tree, tree, tree);
133 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
134 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
135 static bool reorder_operands_p (const_tree, const_tree);
136 static tree fold_negate_const (tree, tree);
137 static tree fold_not_const (const_tree, tree);
138 static tree fold_relational_const (enum tree_code, tree, tree, tree);
139 static tree fold_convert_const (enum tree_code, tree, tree);
140
141 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
142 Otherwise, return LOC. */
143
144 static location_t
145 expr_location_or (tree t, location_t loc)
146 {
147 location_t tloc = EXPR_LOCATION (t);
148 return tloc != UNKNOWN_LOCATION ? tloc : loc;
149 }
150
  151 /* Similar to protected_set_expr_location, but never modify X in place;
  152    if the location can and needs to be set, unshare X first. */
153
154 static inline tree
155 protected_set_expr_location_unshare (tree x, location_t loc)
156 {
157 if (CAN_HAVE_LOCATION_P (x)
158 && EXPR_LOCATION (x) != loc
159 && !(TREE_CODE (x) == SAVE_EXPR
160 || TREE_CODE (x) == TARGET_EXPR
161 || TREE_CODE (x) == BIND_EXPR))
162 {
163 x = copy_node (x);
164 SET_EXPR_LOCATION (x, loc);
165 }
166 return x;
167 }
168
169
170 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
171 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
172 and SUM1. Then this yields nonzero if overflow occurred during the
173 addition.
174
175 Overflow occurs if A and B have the same sign, but A and SUM differ in
176 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
177 sign. */
178 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
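
/* Worked example (editor's note): with 8-bit values a = 0x70, b = 0x70 and
   sum = 0xe0, ~(a ^ b) == 0xff has the sign bit set (the operands agree in
   sign) and (a ^ sum) == 0x90 has it set too (a and sum disagree), so the
   AND is negative and the macro reports the overflow in 0x70 + 0x70. */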
179 \f
180 /* If ARG2 divides ARG1 with zero remainder, carries out the division
181 of type CODE and returns the quotient.
182 Otherwise returns NULL_TREE. */
183
184 tree
185 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
186 {
187 double_int quo, rem;
188 int uns;
189
  190   /* The sign of the division is taken from operand two, which does
  191      the right thing for POINTER_PLUS_EXPR, where we want a signed
  192      division. */
193 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
194 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
195 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
196 uns = false;
197
198 quo = double_int_divmod (tree_to_double_int (arg1),
199 tree_to_double_int (arg2),
200 uns, code, &rem);
201
202 if (double_int_zero_p (rem))
203 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
204
205 return NULL_TREE;
206 }
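
/* Editor's example: for INTEGER_CST operands 12 and 4,
   div_if_zero_remainder (TRUNC_DIV_EXPR, arg1, arg2) returns the constant
   3; for 13 and 4 the remainder is nonzero, so NULL_TREE is returned. */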
207 \f
208 /* This is nonzero if we should defer warnings about undefined
209 overflow. This facility exists because these warnings are a
210 special case. The code to estimate loop iterations does not want
211 to issue any warnings, since it works with expressions which do not
212 occur in user code. Various bits of cleanup code call fold(), but
213 only use the result if it has certain characteristics (e.g., is a
214 constant); that code only wants to issue a warning if the result is
215 used. */
216
217 static int fold_deferring_overflow_warnings;
218
219 /* If a warning about undefined overflow is deferred, this is the
220 warning. Note that this may cause us to turn two warnings into
221 one, but that is fine since it is sufficient to only give one
222 warning per expression. */
223
224 static const char* fold_deferred_overflow_warning;
225
226 /* If a warning about undefined overflow is deferred, this is the
227 level at which the warning should be emitted. */
228
229 static enum warn_strict_overflow_code fold_deferred_overflow_code;
230
231 /* Start deferring overflow warnings. We could use a stack here to
232 permit nested calls, but at present it is not necessary. */
233
234 void
235 fold_defer_overflow_warnings (void)
236 {
237 ++fold_deferring_overflow_warnings;
238 }
239
240 /* Stop deferring overflow warnings. If there is a pending warning,
241 and ISSUE is true, then issue the warning if appropriate. STMT is
242 the statement with which the warning should be associated (used for
243 location information); STMT may be NULL. CODE is the level of the
244 warning--a warn_strict_overflow_code value. This function will use
245 the smaller of CODE and the deferred code when deciding whether to
246 issue the warning. CODE may be zero to mean to always use the
247 deferred code. */
248
249 void
250 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
251 {
252 const char *warnmsg;
253 location_t locus;
254
255 gcc_assert (fold_deferring_overflow_warnings > 0);
256 --fold_deferring_overflow_warnings;
257 if (fold_deferring_overflow_warnings > 0)
258 {
259 if (fold_deferred_overflow_warning != NULL
260 && code != 0
261 && code < (int) fold_deferred_overflow_code)
262 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
263 return;
264 }
265
266 warnmsg = fold_deferred_overflow_warning;
267 fold_deferred_overflow_warning = NULL;
268
269 if (!issue || warnmsg == NULL)
270 return;
271
272 if (gimple_no_warning_p (stmt))
273 return;
274
275 /* Use the smallest code level when deciding to issue the
276 warning. */
277 if (code == 0 || code > (int) fold_deferred_overflow_code)
278 code = fold_deferred_overflow_code;
279
280 if (!issue_strict_overflow_warning (code))
281 return;
282
283 if (stmt == NULL)
284 locus = input_location;
285 else
286 locus = gimple_location (stmt);
287 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
288 }
289
290 /* Stop deferring overflow warnings, ignoring any deferred
291 warnings. */
292
293 void
294 fold_undefer_and_ignore_overflow_warnings (void)
295 {
296 fold_undefer_overflow_warnings (false, NULL, 0);
297 }
298
299 /* Whether we are deferring overflow warnings. */
300
301 bool
302 fold_deferring_overflow_warnings_p (void)
303 {
304 return fold_deferring_overflow_warnings > 0;
305 }
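
/* Typical usage (editor's sketch; the caller shown is hypothetical, but
   the entry points are the ones defined above).  Warn only if the folded
   result will actually be used:

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE, stmt, 0);  */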
306
307 /* This is called when we fold something based on the fact that signed
308 overflow is undefined. */
309
310 static void
311 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
312 {
313 if (fold_deferring_overflow_warnings > 0)
314 {
315 if (fold_deferred_overflow_warning == NULL
316 || wc < fold_deferred_overflow_code)
317 {
318 fold_deferred_overflow_warning = gmsgid;
319 fold_deferred_overflow_code = wc;
320 }
321 }
322 else if (issue_strict_overflow_warning (wc))
323 warning (OPT_Wstrict_overflow, gmsgid);
324 }
325 \f
326 /* Return true if the built-in mathematical function specified by CODE
327 is odd, i.e. -f(x) == f(-x). */
328
329 static bool
330 negate_mathfn_p (enum built_in_function code)
331 {
332 switch (code)
333 {
334 CASE_FLT_FN (BUILT_IN_ASIN):
335 CASE_FLT_FN (BUILT_IN_ASINH):
336 CASE_FLT_FN (BUILT_IN_ATAN):
337 CASE_FLT_FN (BUILT_IN_ATANH):
338 CASE_FLT_FN (BUILT_IN_CASIN):
339 CASE_FLT_FN (BUILT_IN_CASINH):
340 CASE_FLT_FN (BUILT_IN_CATAN):
341 CASE_FLT_FN (BUILT_IN_CATANH):
342 CASE_FLT_FN (BUILT_IN_CBRT):
343 CASE_FLT_FN (BUILT_IN_CPROJ):
344 CASE_FLT_FN (BUILT_IN_CSIN):
345 CASE_FLT_FN (BUILT_IN_CSINH):
346 CASE_FLT_FN (BUILT_IN_CTAN):
347 CASE_FLT_FN (BUILT_IN_CTANH):
348 CASE_FLT_FN (BUILT_IN_ERF):
349 CASE_FLT_FN (BUILT_IN_LLROUND):
350 CASE_FLT_FN (BUILT_IN_LROUND):
351 CASE_FLT_FN (BUILT_IN_ROUND):
352 CASE_FLT_FN (BUILT_IN_SIN):
353 CASE_FLT_FN (BUILT_IN_SINH):
354 CASE_FLT_FN (BUILT_IN_TAN):
355 CASE_FLT_FN (BUILT_IN_TANH):
356 CASE_FLT_FN (BUILT_IN_TRUNC):
357 return true;
358
359 CASE_FLT_FN (BUILT_IN_LLRINT):
360 CASE_FLT_FN (BUILT_IN_LRINT):
361 CASE_FLT_FN (BUILT_IN_NEARBYINT):
362 CASE_FLT_FN (BUILT_IN_RINT):
363 return !flag_rounding_math;
364
365 default:
366 break;
367 }
368 return false;
369 }
370
371 /* Check whether we may negate an integer constant T without causing
372 overflow. */
373
374 bool
375 may_negate_without_overflow_p (const_tree t)
376 {
377 unsigned HOST_WIDE_INT val;
378 unsigned int prec;
379 tree type;
380
381 gcc_assert (TREE_CODE (t) == INTEGER_CST);
382
383 type = TREE_TYPE (t);
384 if (TYPE_UNSIGNED (type))
385 return false;
386
387 prec = TYPE_PRECISION (type);
388 if (prec > HOST_BITS_PER_WIDE_INT)
389 {
390 if (TREE_INT_CST_LOW (t) != 0)
391 return true;
392 prec -= HOST_BITS_PER_WIDE_INT;
393 val = TREE_INT_CST_HIGH (t);
394 }
395 else
396 val = TREE_INT_CST_LOW (t);
397 if (prec < HOST_BITS_PER_WIDE_INT)
398 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
399 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
400 }
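
/* Editor's example: for a 32-bit signed int on a 64-bit host, PREC is 32,
   VAL is the low word masked down to 32 bits, and the function returns
   false only for 0x80000000, i.e. INT_MIN, whose negation would
   overflow. */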
401
402 /* Determine whether an expression T can be cheaply negated using
403 the function negate_expr without introducing undefined overflow. */
404
405 static bool
406 negate_expr_p (tree t)
407 {
408 tree type;
409
410 if (t == 0)
411 return false;
412
413 type = TREE_TYPE (t);
414
415 STRIP_SIGN_NOPS (t);
416 switch (TREE_CODE (t))
417 {
418 case INTEGER_CST:
419 if (TYPE_OVERFLOW_WRAPS (type))
420 return true;
421
422 /* Check that -CST will not overflow type. */
423 return may_negate_without_overflow_p (t);
424 case BIT_NOT_EXPR:
425 return (INTEGRAL_TYPE_P (type)
426 && TYPE_OVERFLOW_WRAPS (type));
427
428 case FIXED_CST:
429 case NEGATE_EXPR:
430 return true;
431
432 case REAL_CST:
433 /* We want to canonicalize to positive real constants. Pretend
434 that only negative ones can be easily negated. */
435 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
436
437 case COMPLEX_CST:
438 return negate_expr_p (TREE_REALPART (t))
439 && negate_expr_p (TREE_IMAGPART (t));
440
441 case COMPLEX_EXPR:
442 return negate_expr_p (TREE_OPERAND (t, 0))
443 && negate_expr_p (TREE_OPERAND (t, 1));
444
445 case CONJ_EXPR:
446 return negate_expr_p (TREE_OPERAND (t, 0));
447
448 case PLUS_EXPR:
449 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
450 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
451 return false;
452 /* -(A + B) -> (-B) - A. */
453 if (negate_expr_p (TREE_OPERAND (t, 1))
454 && reorder_operands_p (TREE_OPERAND (t, 0),
455 TREE_OPERAND (t, 1)))
456 return true;
457 /* -(A + B) -> (-A) - B. */
458 return negate_expr_p (TREE_OPERAND (t, 0));
459
460 case MINUS_EXPR:
461 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
462 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
463 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
464 && reorder_operands_p (TREE_OPERAND (t, 0),
465 TREE_OPERAND (t, 1));
466
467 case MULT_EXPR:
468 if (TYPE_UNSIGNED (TREE_TYPE (t)))
469 break;
470
471 /* Fall through. */
472
473 case RDIV_EXPR:
474 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
475 return negate_expr_p (TREE_OPERAND (t, 1))
476 || negate_expr_p (TREE_OPERAND (t, 0));
477 break;
478
479 case TRUNC_DIV_EXPR:
480 case ROUND_DIV_EXPR:
481 case FLOOR_DIV_EXPR:
482 case CEIL_DIV_EXPR:
483 case EXACT_DIV_EXPR:
484 /* In general we can't negate A / B, because if A is INT_MIN and
485 B is 1, we may turn this into INT_MIN / -1 which is undefined
486 and actually traps on some architectures. But if overflow is
487 undefined, we can negate, because - (INT_MIN / 1) is an
488 overflow. */
489 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
490 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
491 break;
492 return negate_expr_p (TREE_OPERAND (t, 1))
493 || negate_expr_p (TREE_OPERAND (t, 0));
494
495 case NOP_EXPR:
496 /* Negate -((double)float) as (double)(-float). */
497 if (TREE_CODE (type) == REAL_TYPE)
498 {
499 tree tem = strip_float_extensions (t);
500 if (tem != t)
501 return negate_expr_p (tem);
502 }
503 break;
504
505 case CALL_EXPR:
506 /* Negate -f(x) as f(-x). */
507 if (negate_mathfn_p (builtin_mathfn_code (t)))
508 return negate_expr_p (CALL_EXPR_ARG (t, 0));
509 break;
510
511 case RSHIFT_EXPR:
512 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
513 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
514 {
515 tree op1 = TREE_OPERAND (t, 1);
516 if (TREE_INT_CST_HIGH (op1) == 0
517 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
518 == TREE_INT_CST_LOW (op1))
519 return true;
520 }
521 break;
522
523 default:
524 break;
525 }
526 return false;
527 }
528
  529 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if
  530    no simplification is possible.
531 If negate_expr_p would return true for T, NULL_TREE will never be
532 returned. */
533
534 static tree
535 fold_negate_expr (location_t loc, tree t)
536 {
537 tree type = TREE_TYPE (t);
538 tree tem;
539
540 switch (TREE_CODE (t))
541 {
542 /* Convert - (~A) to A + 1. */
543 case BIT_NOT_EXPR:
544 if (INTEGRAL_TYPE_P (type))
545 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
546 build_int_cst (type, 1));
547 break;
548
549 case INTEGER_CST:
550 tem = fold_negate_const (t, type);
551 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
552 || !TYPE_OVERFLOW_TRAPS (type))
553 return tem;
554 break;
555
556 case REAL_CST:
557 tem = fold_negate_const (t, type);
558 /* Two's complement FP formats, such as c4x, may overflow. */
559 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
560 return tem;
561 break;
562
563 case FIXED_CST:
564 tem = fold_negate_const (t, type);
565 return tem;
566
567 case COMPLEX_CST:
568 {
569 tree rpart = negate_expr (TREE_REALPART (t));
570 tree ipart = negate_expr (TREE_IMAGPART (t));
571
572 if ((TREE_CODE (rpart) == REAL_CST
573 && TREE_CODE (ipart) == REAL_CST)
574 || (TREE_CODE (rpart) == INTEGER_CST
575 && TREE_CODE (ipart) == INTEGER_CST))
576 return build_complex (type, rpart, ipart);
577 }
578 break;
579
580 case COMPLEX_EXPR:
581 if (negate_expr_p (t))
582 return fold_build2_loc (loc, COMPLEX_EXPR, type,
583 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
584 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
585 break;
586
587 case CONJ_EXPR:
588 if (negate_expr_p (t))
589 return fold_build1_loc (loc, CONJ_EXPR, type,
590 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
591 break;
592
593 case NEGATE_EXPR:
594 return TREE_OPERAND (t, 0);
595
596 case PLUS_EXPR:
597 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
599 {
600 /* -(A + B) -> (-B) - A. */
601 if (negate_expr_p (TREE_OPERAND (t, 1))
602 && reorder_operands_p (TREE_OPERAND (t, 0),
603 TREE_OPERAND (t, 1)))
604 {
605 tem = negate_expr (TREE_OPERAND (t, 1));
606 return fold_build2_loc (loc, MINUS_EXPR, type,
607 tem, TREE_OPERAND (t, 0));
608 }
609
610 /* -(A + B) -> (-A) - B. */
611 if (negate_expr_p (TREE_OPERAND (t, 0)))
612 {
613 tem = negate_expr (TREE_OPERAND (t, 0));
614 return fold_build2_loc (loc, MINUS_EXPR, type,
615 tem, TREE_OPERAND (t, 1));
616 }
617 }
618 break;
619
620 case MINUS_EXPR:
621 /* - (A - B) -> B - A */
622 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
623 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
624 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
625 return fold_build2_loc (loc, MINUS_EXPR, type,
626 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
627 break;
628
629 case MULT_EXPR:
630 if (TYPE_UNSIGNED (type))
631 break;
632
633 /* Fall through. */
634
635 case RDIV_EXPR:
636 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
637 {
638 tem = TREE_OPERAND (t, 1);
639 if (negate_expr_p (tem))
640 return fold_build2_loc (loc, TREE_CODE (t), type,
641 TREE_OPERAND (t, 0), negate_expr (tem));
642 tem = TREE_OPERAND (t, 0);
643 if (negate_expr_p (tem))
644 return fold_build2_loc (loc, TREE_CODE (t), type,
645 negate_expr (tem), TREE_OPERAND (t, 1));
646 }
647 break;
648
649 case TRUNC_DIV_EXPR:
650 case ROUND_DIV_EXPR:
651 case FLOOR_DIV_EXPR:
652 case CEIL_DIV_EXPR:
653 case EXACT_DIV_EXPR:
654 /* In general we can't negate A / B, because if A is INT_MIN and
655 B is 1, we may turn this into INT_MIN / -1 which is undefined
656 and actually traps on some architectures. But if overflow is
657 undefined, we can negate, because - (INT_MIN / 1) is an
658 overflow. */
659 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
660 {
661 const char * const warnmsg = G_("assuming signed overflow does not "
662 "occur when negating a division");
663 tem = TREE_OPERAND (t, 1);
664 if (negate_expr_p (tem))
665 {
666 if (INTEGRAL_TYPE_P (type)
667 && (TREE_CODE (tem) != INTEGER_CST
668 || integer_onep (tem)))
669 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
670 return fold_build2_loc (loc, TREE_CODE (t), type,
671 TREE_OPERAND (t, 0), negate_expr (tem));
672 }
673 tem = TREE_OPERAND (t, 0);
674 if (negate_expr_p (tem))
675 {
676 if (INTEGRAL_TYPE_P (type)
677 && (TREE_CODE (tem) != INTEGER_CST
678 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
679 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
680 return fold_build2_loc (loc, TREE_CODE (t), type,
681 negate_expr (tem), TREE_OPERAND (t, 1));
682 }
683 }
684 break;
685
686 case NOP_EXPR:
687 /* Convert -((double)float) into (double)(-float). */
688 if (TREE_CODE (type) == REAL_TYPE)
689 {
690 tem = strip_float_extensions (t);
691 if (tem != t && negate_expr_p (tem))
692 return fold_convert_loc (loc, type, negate_expr (tem));
693 }
694 break;
695
696 case CALL_EXPR:
697 /* Negate -f(x) as f(-x). */
698 if (negate_mathfn_p (builtin_mathfn_code (t))
699 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
700 {
701 tree fndecl, arg;
702
703 fndecl = get_callee_fndecl (t);
704 arg = negate_expr (CALL_EXPR_ARG (t, 0));
705 return build_call_expr_loc (loc, fndecl, 1, arg);
706 }
707 break;
708
709 case RSHIFT_EXPR:
710 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
711 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
712 {
713 tree op1 = TREE_OPERAND (t, 1);
714 if (TREE_INT_CST_HIGH (op1) == 0
715 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
716 == TREE_INT_CST_LOW (op1))
717 {
718 tree ntype = TYPE_UNSIGNED (type)
719 ? signed_type_for (type)
720 : unsigned_type_for (type);
721 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
722 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
723 return fold_convert_loc (loc, type, temp);
724 }
725 }
726 break;
727
728 default:
729 break;
730 }
731
732 return NULL_TREE;
733 }
734
  735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
  736    negated in a simpler way.  Also allow T to be NULL_TREE, in which case
  737    return NULL_TREE. */
738
739 static tree
740 negate_expr (tree t)
741 {
742 tree type, tem;
743 location_t loc;
744
745 if (t == NULL_TREE)
746 return NULL_TREE;
747
748 loc = EXPR_LOCATION (t);
749 type = TREE_TYPE (t);
750 STRIP_SIGN_NOPS (t);
751
752 tem = fold_negate_expr (loc, t);
753 if (!tem)
754 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
755 return fold_convert_loc (loc, type, tem);
756 }
757 \f
  758 /* Split a tree IN into constant, literal and variable parts that could be
759 combined with CODE to make IN. "constant" means an expression with
760 TREE_CONSTANT but that isn't an actual constant. CODE must be a
761 commutative arithmetic operation. Store the constant part into *CONP,
762 the literal in *LITP and return the variable part. If a part isn't
763 present, set it to null. If the tree does not decompose in this way,
764 return the entire tree as the variable part and the other parts as null.
765
766 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
767 case, we negate an operand that was subtracted. Except if it is a
768 literal for which we use *MINUS_LITP instead.
769
770 If NEGATE_P is true, we are negating all of IN, again except a literal
771 for which we use *MINUS_LITP instead.
772
773 If IN is itself a literal or constant, return it as appropriate.
774
775 Note that we do not guarantee that any of the three values will be the
776 same type as IN, but they will have the same signedness and mode. */
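
/* Example (editor's note): splitting IN = (x - 5) with CODE == PLUS_EXPR
   and NEGATE_P false returns VAR == x and sets *MINUS_LITP to 5, leaving
   *LITP and *CONP null, because the subtracted operand is a literal. */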
777
778 static tree
779 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
780 tree *minus_litp, int negate_p)
781 {
782 tree var = 0;
783
784 *conp = 0;
785 *litp = 0;
786 *minus_litp = 0;
787
788 /* Strip any conversions that don't change the machine mode or signedness. */
789 STRIP_SIGN_NOPS (in);
790
791 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
792 || TREE_CODE (in) == FIXED_CST)
793 *litp = in;
794 else if (TREE_CODE (in) == code
795 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
796 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
797 /* We can associate addition and subtraction together (even
798 though the C standard doesn't say so) for integers because
799 the value is not affected. For reals, the value might be
800 affected, so we can't. */
801 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
802 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
803 {
804 tree op0 = TREE_OPERAND (in, 0);
805 tree op1 = TREE_OPERAND (in, 1);
806 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
807 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
808
809 /* First see if either of the operands is a literal, then a constant. */
810 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
811 || TREE_CODE (op0) == FIXED_CST)
812 *litp = op0, op0 = 0;
813 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
814 || TREE_CODE (op1) == FIXED_CST)
815 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
816
817 if (op0 != 0 && TREE_CONSTANT (op0))
818 *conp = op0, op0 = 0;
819 else if (op1 != 0 && TREE_CONSTANT (op1))
820 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
821
822 /* If we haven't dealt with either operand, this is not a case we can
823 decompose. Otherwise, VAR is either of the ones remaining, if any. */
824 if (op0 != 0 && op1 != 0)
825 var = in;
826 else if (op0 != 0)
827 var = op0;
828 else
829 var = op1, neg_var_p = neg1_p;
830
831 /* Now do any needed negations. */
832 if (neg_litp_p)
833 *minus_litp = *litp, *litp = 0;
834 if (neg_conp_p)
835 *conp = negate_expr (*conp);
836 if (neg_var_p)
837 var = negate_expr (var);
838 }
839 else if (TREE_CONSTANT (in))
840 *conp = in;
841 else
842 var = in;
843
844 if (negate_p)
845 {
846 if (*litp)
847 *minus_litp = *litp, *litp = 0;
848 else if (*minus_litp)
849 *litp = *minus_litp, *minus_litp = 0;
850 *conp = negate_expr (*conp);
851 var = negate_expr (var);
852 }
853
854 return var;
855 }
856
857 /* Re-associate trees split by the above function. T1 and T2 are
858 either expressions to associate or null. Return the new
859 expression, if any. LOC is the location of the new expression. If
860 we build an operation, do it in TYPE and with CODE. */
861
862 static tree
863 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
864 {
865 if (t1 == 0)
866 return t2;
867 else if (t2 == 0)
868 return t1;
869
870 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
871 try to fold this since we will have infinite recursion. But do
872 deal with any NEGATE_EXPRs. */
873 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
874 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
875 {
876 if (code == PLUS_EXPR)
877 {
878 if (TREE_CODE (t1) == NEGATE_EXPR)
879 return build2_loc (loc, MINUS_EXPR, type,
880 fold_convert_loc (loc, type, t2),
881 fold_convert_loc (loc, type,
882 TREE_OPERAND (t1, 0)));
883 else if (TREE_CODE (t2) == NEGATE_EXPR)
884 return build2_loc (loc, MINUS_EXPR, type,
885 fold_convert_loc (loc, type, t1),
886 fold_convert_loc (loc, type,
887 TREE_OPERAND (t2, 0)));
888 else if (integer_zerop (t2))
889 return fold_convert_loc (loc, type, t1);
890 }
891 else if (code == MINUS_EXPR)
892 {
893 if (integer_zerop (t2))
894 return fold_convert_loc (loc, type, t1);
895 }
896
897 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
898 fold_convert_loc (loc, type, t2));
899 }
900
901 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
902 fold_convert_loc (loc, type, t2));
903 }
904 \f
905 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
906 for use in int_const_binop, size_binop and size_diffop. */
907
908 static bool
909 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
910 {
911 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
912 return false;
913 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
914 return false;
915
916 switch (code)
917 {
918 case LSHIFT_EXPR:
919 case RSHIFT_EXPR:
920 case LROTATE_EXPR:
921 case RROTATE_EXPR:
922 return true;
923
924 default:
925 break;
926 }
927
928 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
929 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
930 && TYPE_MODE (type1) == TYPE_MODE (type2);
931 }
932
933
934 /* Combine two integer constants ARG1 and ARG2 under operation CODE
935 to produce a new constant. Return NULL_TREE if we don't know how
936 to evaluate CODE at compile-time. */
937
938 tree
939 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
940 {
941 double_int op1, op2, res, tmp;
942 tree t;
943 tree type = TREE_TYPE (arg1);
944 bool uns = TYPE_UNSIGNED (type);
945 bool is_sizetype
946 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
947 bool overflow = false;
948
949 op1 = tree_to_double_int (arg1);
950 op2 = tree_to_double_int (arg2);
951
952 switch (code)
953 {
954 case BIT_IOR_EXPR:
955 res = double_int_ior (op1, op2);
956 break;
957
958 case BIT_XOR_EXPR:
959 res = double_int_xor (op1, op2);
960 break;
961
962 case BIT_AND_EXPR:
963 res = double_int_and (op1, op2);
964 break;
965
966 case RSHIFT_EXPR:
967 res = double_int_rshift (op1, double_int_to_shwi (op2),
968 TYPE_PRECISION (type), !uns);
969 break;
970
971 case LSHIFT_EXPR:
972 /* It's unclear from the C standard whether shifts can overflow.
973 The following code ignores overflow; perhaps a C standard
974 interpretation ruling is needed. */
975 res = double_int_lshift (op1, double_int_to_shwi (op2),
976 TYPE_PRECISION (type), !uns);
977 break;
978
979 case RROTATE_EXPR:
980 res = double_int_rrotate (op1, double_int_to_shwi (op2),
981 TYPE_PRECISION (type));
982 break;
983
984 case LROTATE_EXPR:
985 res = double_int_lrotate (op1, double_int_to_shwi (op2),
986 TYPE_PRECISION (type));
987 break;
988
989 case PLUS_EXPR:
990 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
991 &res.low, &res.high);
992 break;
993
994 case MINUS_EXPR:
995 neg_double (op2.low, op2.high, &res.low, &res.high);
996 add_double (op1.low, op1.high, res.low, res.high,
997 &res.low, &res.high);
998 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
999 break;
1000
1001 case MULT_EXPR:
1002 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
1003 &res.low, &res.high);
1004 break;
1005
1006 case TRUNC_DIV_EXPR:
1007 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1008 case EXACT_DIV_EXPR:
1009 /* This is a shortcut for a common special case. */
1010 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1011 && !TREE_OVERFLOW (arg1)
1012 && !TREE_OVERFLOW (arg2)
1013 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1014 {
1015 if (code == CEIL_DIV_EXPR)
1016 op1.low += op2.low - 1;
1017
1018 res.low = op1.low / op2.low, res.high = 0;
1019 break;
1020 }
1021
1022 /* ... fall through ... */
1023
1024 case ROUND_DIV_EXPR:
1025 if (double_int_zero_p (op2))
1026 return NULL_TREE;
1027 if (double_int_one_p (op2))
1028 {
1029 res = op1;
1030 break;
1031 }
1032 if (double_int_equal_p (op1, op2)
1033 && ! double_int_zero_p (op1))
1034 {
1035 res = double_int_one;
1036 break;
1037 }
1038 overflow = div_and_round_double (code, uns,
1039 op1.low, op1.high, op2.low, op2.high,
1040 &res.low, &res.high,
1041 &tmp.low, &tmp.high);
1042 break;
1043
1044 case TRUNC_MOD_EXPR:
1045 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1046 /* This is a shortcut for a common special case. */
1047 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1048 && !TREE_OVERFLOW (arg1)
1049 && !TREE_OVERFLOW (arg2)
1050 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1051 {
1052 if (code == CEIL_MOD_EXPR)
1053 op1.low += op2.low - 1;
1054 res.low = op1.low % op2.low, res.high = 0;
1055 break;
1056 }
1057
1058 /* ... fall through ... */
1059
1060 case ROUND_MOD_EXPR:
1061 if (double_int_zero_p (op2))
1062 return NULL_TREE;
1063 overflow = div_and_round_double (code, uns,
1064 op1.low, op1.high, op2.low, op2.high,
1065 &tmp.low, &tmp.high,
1066 &res.low, &res.high);
1067 break;
1068
1069 case MIN_EXPR:
1070 res = double_int_min (op1, op2, uns);
1071 break;
1072
1073 case MAX_EXPR:
1074 res = double_int_max (op1, op2, uns);
1075 break;
1076
1077 default:
1078 return NULL_TREE;
1079 }
1080
1081 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
1082 ((!uns || is_sizetype) && overflow)
1083 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1084
1085 return t;
1086 }
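
/* Editor's example: int_const_binop (PLUS_EXPR, two, three), where TWO and
   THREE are INTEGER_CSTs of the same type, yields the INTEGER_CST 5;
   dividing by a zero constant instead falls into the checks above and
   returns NULL_TREE. */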
1087
1088 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1089 constant. We assume ARG1 and ARG2 have the same data type, or at least
 1090    are the same kind of constant and the same machine mode.  Return NULL_TREE
 1091    if combining the constants is not allowed in the current operating mode. */
1092
1093 static tree
1094 const_binop (enum tree_code code, tree arg1, tree arg2)
1095 {
1096 /* Sanity check for the recursive cases. */
1097 if (!arg1 || !arg2)
1098 return NULL_TREE;
1099
1100 STRIP_NOPS (arg1);
1101 STRIP_NOPS (arg2);
1102
1103 if (TREE_CODE (arg1) == INTEGER_CST)
1104 return int_const_binop (code, arg1, arg2);
1105
1106 if (TREE_CODE (arg1) == REAL_CST)
1107 {
1108 enum machine_mode mode;
1109 REAL_VALUE_TYPE d1;
1110 REAL_VALUE_TYPE d2;
1111 REAL_VALUE_TYPE value;
1112 REAL_VALUE_TYPE result;
1113 bool inexact;
1114 tree t, type;
1115
1116 /* The following codes are handled by real_arithmetic. */
1117 switch (code)
1118 {
1119 case PLUS_EXPR:
1120 case MINUS_EXPR:
1121 case MULT_EXPR:
1122 case RDIV_EXPR:
1123 case MIN_EXPR:
1124 case MAX_EXPR:
1125 break;
1126
1127 default:
1128 return NULL_TREE;
1129 }
1130
1131 d1 = TREE_REAL_CST (arg1);
1132 d2 = TREE_REAL_CST (arg2);
1133
1134 type = TREE_TYPE (arg1);
1135 mode = TYPE_MODE (type);
1136
1137 /* Don't perform operation if we honor signaling NaNs and
1138 either operand is a NaN. */
1139 if (HONOR_SNANS (mode)
1140 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1141 return NULL_TREE;
1142
1143 /* Don't perform operation if it would raise a division
1144 by zero exception. */
1145 if (code == RDIV_EXPR
1146 && REAL_VALUES_EQUAL (d2, dconst0)
1147 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1148 return NULL_TREE;
1149
1150 /* If either operand is a NaN, just return it. Otherwise, set up
 1151        for a floating-point trap; we return an overflow. */
1152 if (REAL_VALUE_ISNAN (d1))
1153 return arg1;
1154 else if (REAL_VALUE_ISNAN (d2))
1155 return arg2;
1156
1157 inexact = real_arithmetic (&value, code, &d1, &d2);
1158 real_convert (&result, mode, &value);
1159
1160 /* Don't constant fold this floating point operation if
 1161        the result has overflowed and flag_trapping_math is set. */
1162 if (flag_trapping_math
1163 && MODE_HAS_INFINITIES (mode)
1164 && REAL_VALUE_ISINF (result)
1165 && !REAL_VALUE_ISINF (d1)
1166 && !REAL_VALUE_ISINF (d2))
1167 return NULL_TREE;
1168
1169 /* Don't constant fold this floating point operation if the
 1170        result may depend upon the run-time rounding mode and
1171 flag_rounding_math is set, or if GCC's software emulation
1172 is unable to accurately represent the result. */
1173 if ((flag_rounding_math
1174 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1175 && (inexact || !real_identical (&result, &value)))
1176 return NULL_TREE;
1177
1178 t = build_real (type, result);
1179
1180 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1181 return t;
1182 }
1183
1184 if (TREE_CODE (arg1) == FIXED_CST)
1185 {
1186 FIXED_VALUE_TYPE f1;
1187 FIXED_VALUE_TYPE f2;
1188 FIXED_VALUE_TYPE result;
1189 tree t, type;
1190 int sat_p;
1191 bool overflow_p;
1192
1193 /* The following codes are handled by fixed_arithmetic. */
1194 switch (code)
1195 {
1196 case PLUS_EXPR:
1197 case MINUS_EXPR:
1198 case MULT_EXPR:
1199 case TRUNC_DIV_EXPR:
1200 f2 = TREE_FIXED_CST (arg2);
1201 break;
1202
1203 case LSHIFT_EXPR:
1204 case RSHIFT_EXPR:
1205 f2.data.high = TREE_INT_CST_HIGH (arg2);
1206 f2.data.low = TREE_INT_CST_LOW (arg2);
1207 f2.mode = SImode;
1208 break;
1209
1210 default:
1211 return NULL_TREE;
1212 }
1213
1214 f1 = TREE_FIXED_CST (arg1);
1215 type = TREE_TYPE (arg1);
1216 sat_p = TYPE_SATURATING (type);
1217 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1218 t = build_fixed (type, result);
1219 /* Propagate overflow flags. */
1220 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1221 TREE_OVERFLOW (t) = 1;
1222 return t;
1223 }
1224
1225 if (TREE_CODE (arg1) == COMPLEX_CST)
1226 {
1227 tree type = TREE_TYPE (arg1);
1228 tree r1 = TREE_REALPART (arg1);
1229 tree i1 = TREE_IMAGPART (arg1);
1230 tree r2 = TREE_REALPART (arg2);
1231 tree i2 = TREE_IMAGPART (arg2);
1232 tree real, imag;
1233
1234 switch (code)
1235 {
1236 case PLUS_EXPR:
1237 case MINUS_EXPR:
1238 real = const_binop (code, r1, r2);
1239 imag = const_binop (code, i1, i2);
1240 break;
1241
1242 case MULT_EXPR:
1243 if (COMPLEX_FLOAT_TYPE_P (type))
1244 return do_mpc_arg2 (arg1, arg2, type,
1245 /* do_nonfinite= */ folding_initializer,
1246 mpc_mul);
1247
1248 real = const_binop (MINUS_EXPR,
1249 const_binop (MULT_EXPR, r1, r2),
1250 const_binop (MULT_EXPR, i1, i2));
1251 imag = const_binop (PLUS_EXPR,
1252 const_binop (MULT_EXPR, r1, i2),
1253 const_binop (MULT_EXPR, i1, r2));
1254 break;
1255
1256 case RDIV_EXPR:
1257 if (COMPLEX_FLOAT_TYPE_P (type))
1258 return do_mpc_arg2 (arg1, arg2, type,
1259 /* do_nonfinite= */ folding_initializer,
1260 mpc_div);
1261 /* Fallthru ... */
1262 case TRUNC_DIV_EXPR:
1263 case CEIL_DIV_EXPR:
1264 case FLOOR_DIV_EXPR:
1265 case ROUND_DIV_EXPR:
1266 if (flag_complex_method == 0)
1267 {
1268 /* Keep this algorithm in sync with
1269 tree-complex.c:expand_complex_div_straight().
1270
1271 Expand complex division to scalars, straightforward algorithm.
1272 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1273 t = br*br + bi*bi
1274 */
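              /* Editor's worked example: (3 + 2i) / (1 + 1i) gives t = 2,
                 tr = (3*1 + 2*1)/2 = 2.5 and ti = (2*1 - 3*1)/2 = -0.5,
                 i.e. the result 2.5 - 0.5i. */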
1275 tree magsquared
1276 = const_binop (PLUS_EXPR,
1277 const_binop (MULT_EXPR, r2, r2),
1278 const_binop (MULT_EXPR, i2, i2));
1279 tree t1
1280 = const_binop (PLUS_EXPR,
1281 const_binop (MULT_EXPR, r1, r2),
1282 const_binop (MULT_EXPR, i1, i2));
1283 tree t2
1284 = const_binop (MINUS_EXPR,
1285 const_binop (MULT_EXPR, i1, r2),
1286 const_binop (MULT_EXPR, r1, i2));
1287
1288 real = const_binop (code, t1, magsquared);
1289 imag = const_binop (code, t2, magsquared);
1290 }
1291 else
1292 {
1293 /* Keep this algorithm in sync with
1294 tree-complex.c:expand_complex_div_wide().
1295
1296 Expand complex division to scalars, modified algorithm to minimize
1297 overflow with wide input ranges. */
1298 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1299 fold_abs_const (r2, TREE_TYPE (type)),
1300 fold_abs_const (i2, TREE_TYPE (type)));
1301
1302 if (integer_nonzerop (compare))
1303 {
1304 /* In the TRUE branch, we compute
1305 ratio = br/bi;
1306 div = (br * ratio) + bi;
1307 tr = (ar * ratio) + ai;
1308 ti = (ai * ratio) - ar;
1309 tr = tr / div;
1310 ti = ti / div; */
1311 tree ratio = const_binop (code, r2, i2);
1312 tree div = const_binop (PLUS_EXPR, i2,
1313 const_binop (MULT_EXPR, r2, ratio));
1314 real = const_binop (MULT_EXPR, r1, ratio);
1315 real = const_binop (PLUS_EXPR, real, i1);
1316 real = const_binop (code, real, div);
1317
1318 imag = const_binop (MULT_EXPR, i1, ratio);
1319 imag = const_binop (MINUS_EXPR, imag, r1);
1320 imag = const_binop (code, imag, div);
1321 }
1322 else
1323 {
 1324            /* In the FALSE branch, we compute
 1325                ratio = bi/br;
 1326                div = (bi * ratio) + br;
 1327                tr = (ai * ratio) + ar;
 1328                ti = ai - (ar * ratio);
 1329                tr = tr / div;
 1330                ti = ti / div;  */
1331 tree ratio = const_binop (code, i2, r2);
1332 tree div = const_binop (PLUS_EXPR, r2,
1333 const_binop (MULT_EXPR, i2, ratio));
1334
1335 real = const_binop (MULT_EXPR, i1, ratio);
1336 real = const_binop (PLUS_EXPR, real, r1);
1337 real = const_binop (code, real, div);
1338
1339 imag = const_binop (MULT_EXPR, r1, ratio);
1340 imag = const_binop (MINUS_EXPR, i1, imag);
1341 imag = const_binop (code, imag, div);
1342 }
1343 }
1344 break;
1345
1346 default:
1347 return NULL_TREE;
1348 }
1349
1350 if (real && imag)
1351 return build_complex (type, real, imag);
1352 }
1353
1354 if (TREE_CODE (arg1) == VECTOR_CST
1355 && TREE_CODE (arg2) == VECTOR_CST)
1356 {
 1357       tree type = TREE_TYPE (arg1);
1358 int count = TYPE_VECTOR_SUBPARTS (type), i;
1359 tree *elts = XALLOCAVEC (tree, count);
1360
1361 for (i = 0; i < count; i++)
1362 {
1363 tree elem1 = VECTOR_CST_ELT (arg1, i);
1364 tree elem2 = VECTOR_CST_ELT (arg2, i);
1365
1366 elts[i] = const_binop (code, elem1, elem2);
1367
 1368           /* It is possible that const_binop cannot handle the given
 1369              code and returns NULL_TREE.  */
 1370           if (elts[i] == NULL_TREE)
1371 return NULL_TREE;
1372 }
1373
1374 return build_vector (type, elts);
1375 }
1376 return NULL_TREE;
1377 }
1378
1379 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1380 indicates which particular sizetype to create. */
1381
1382 tree
1383 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1384 {
1385 return build_int_cst (sizetype_tab[(int) kind], number);
1386 }
1387 \f
1388 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1389 is a tree code. The type of the result is taken from the operands.
1390 Both must be equivalent integer types, ala int_binop_types_match_p.
1391 If the operands are constant, so is the result. */
1392
1393 tree
1394 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1395 {
1396 tree type = TREE_TYPE (arg0);
1397
1398 if (arg0 == error_mark_node || arg1 == error_mark_node)
1399 return error_mark_node;
1400
1401 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1402 TREE_TYPE (arg1)));
1403
1404 /* Handle the special case of two integer constants faster. */
1405 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1406 {
1407 /* And some specific cases even faster than that. */
1408 if (code == PLUS_EXPR)
1409 {
1410 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1411 return arg1;
1412 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1413 return arg0;
1414 }
1415 else if (code == MINUS_EXPR)
1416 {
1417 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1418 return arg0;
1419 }
1420 else if (code == MULT_EXPR)
1421 {
1422 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1423 return arg1;
1424 }
1425
1426 /* Handle general case of two integer constants. */
1427 return int_const_binop (code, arg0, arg1);
1428 }
1429
1430 return fold_build2_loc (loc, code, type, arg0, arg1);
1431 }
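
/* Editor's examples: size_binop (PLUS_EXPR, size_int (4), size_int (8))
   folds straight to the sizetype constant 12 via int_const_binop, while
   size_binop (MULT_EXPR, size_int (1), size_int (8)) returns the second
   operand directly through the fast path above. */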
1432
1433 /* Given two values, either both of sizetype or both of bitsizetype,
1434 compute the difference between the two values. Return the value
 1435    in the signed type corresponding to the type of the operands. */
1436
1437 tree
1438 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1439 {
1440 tree type = TREE_TYPE (arg0);
1441 tree ctype;
1442
1443 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1444 TREE_TYPE (arg1)));
1445
1446 /* If the type is already signed, just do the simple thing. */
1447 if (!TYPE_UNSIGNED (type))
1448 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1449
1450 if (type == sizetype)
1451 ctype = ssizetype;
1452 else if (type == bitsizetype)
1453 ctype = sbitsizetype;
1454 else
1455 ctype = signed_type_for (type);
1456
1457 /* If either operand is not a constant, do the conversions to the signed
1458 type and subtract. The hardware will do the right thing with any
1459 overflow in the subtraction. */
1460 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1461 return size_binop_loc (loc, MINUS_EXPR,
1462 fold_convert_loc (loc, ctype, arg0),
1463 fold_convert_loc (loc, ctype, arg1));
1464
1465 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1466 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1467 overflow) and negate (which can't either). Special-case a result
1468 of zero while we're here. */
1469 if (tree_int_cst_equal (arg0, arg1))
1470 return build_int_cst (ctype, 0);
1471 else if (tree_int_cst_lt (arg1, arg0))
1472 return fold_convert_loc (loc, ctype,
1473 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1474 else
1475 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1476 fold_convert_loc (loc, ctype,
1477 size_binop_loc (loc,
1478 MINUS_EXPR,
1479 arg1, arg0)));
1480 }
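
/* Editor's example: on the sizetype constants 4 and 8, size_diffop_loc
   takes the final branch above and produces the ssizetype constant -4,
   computed as 0 - (8 - 4) so that the unsigned subtraction cannot wrap. */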
1481 \f
1482 /* A subroutine of fold_convert_const handling conversions of an
1483 INTEGER_CST to another integer type. */
1484
1485 static tree
1486 fold_convert_const_int_from_int (tree type, const_tree arg1)
1487 {
1488 tree t;
1489
1490 /* Given an integer constant, make new constant with new type,
1491 appropriately sign-extended or truncated. */
1492 t = force_fit_type_double (type, tree_to_double_int (arg1),
1493 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1494 (TREE_INT_CST_HIGH (arg1) < 0
1495 && (TYPE_UNSIGNED (type)
1496 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1497 | TREE_OVERFLOW (arg1));
1498
1499 return t;
1500 }
1501
 1502 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1503 to an integer type. */
1504
1505 static tree
1506 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1507 {
1508 int overflow = 0;
1509 tree t;
1510
1511 /* The following code implements the floating point to integer
1512 conversion rules required by the Java Language Specification,
1513 that IEEE NaNs are mapped to zero and values that overflow
1514 the target precision saturate, i.e. values greater than
1515 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1516 are mapped to INT_MIN. These semantics are allowed by the
1517 C and C++ standards that simply state that the behavior of
1518 FP-to-integer conversion is unspecified upon overflow. */
1519
1520 double_int val;
1521 REAL_VALUE_TYPE r;
1522 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1523
1524 switch (code)
1525 {
1526 case FIX_TRUNC_EXPR:
1527 real_trunc (&r, VOIDmode, &x);
1528 break;
1529
1530 default:
1531 gcc_unreachable ();
1532 }
1533
1534 /* If R is NaN, return zero and show we have an overflow. */
1535 if (REAL_VALUE_ISNAN (r))
1536 {
1537 overflow = 1;
1538 val = double_int_zero;
1539 }
1540
1541 /* See if R is less than the lower bound or greater than the
1542 upper bound. */
1543
1544 if (! overflow)
1545 {
1546 tree lt = TYPE_MIN_VALUE (type);
1547 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1548 if (REAL_VALUES_LESS (r, l))
1549 {
1550 overflow = 1;
1551 val = tree_to_double_int (lt);
1552 }
1553 }
1554
1555 if (! overflow)
1556 {
1557 tree ut = TYPE_MAX_VALUE (type);
1558 if (ut)
1559 {
1560 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1561 if (REAL_VALUES_LESS (u, r))
1562 {
1563 overflow = 1;
1564 val = tree_to_double_int (ut);
1565 }
1566 }
1567 }
1568
1569 if (! overflow)
1570 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1571
1572 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1573 return t;
1574 }
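
/* Editor's examples of the saturating semantics described above, assuming
   a 32-bit INT target type: 3.9 truncates to 3; 1e30 saturates to INT_MAX;
   -1e30 saturates to INT_MIN; a NaN converts to 0.  TREE_OVERFLOW is set
   on the result in the last three cases. */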
1575
1576 /* A subroutine of fold_convert_const handling conversions of a
1577 FIXED_CST to an integer type. */
1578
1579 static tree
1580 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1581 {
1582 tree t;
1583 double_int temp, temp_trunc;
1584 unsigned int mode;
1585
1586 /* Right shift FIXED_CST to temp by fbit. */
1587 temp = TREE_FIXED_CST (arg1).data;
1588 mode = TREE_FIXED_CST (arg1).mode;
1589 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1590 {
1591 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1592 HOST_BITS_PER_DOUBLE_INT,
1593 SIGNED_FIXED_POINT_MODE_P (mode));
1594
1595 /* Left shift temp to temp_trunc by fbit. */
1596 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1597 HOST_BITS_PER_DOUBLE_INT,
1598 SIGNED_FIXED_POINT_MODE_P (mode));
1599 }
1600 else
1601 {
1602 temp = double_int_zero;
1603 temp_trunc = double_int_zero;
1604 }
1605
 1606   /* If FIXED_CST is negative, we need to round the value toward 0;
 1607      do so by adding 1 to TEMP when the discarded fractional bits are nonzero. */
1608 if (SIGNED_FIXED_POINT_MODE_P (mode)
1609 && double_int_negative_p (temp_trunc)
1610 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1611 temp = double_int_add (temp, double_int_one);
1612
1613 /* Given a fixed-point constant, make new constant with new type,
1614 appropriately sign-extended or truncated. */
1615 t = force_fit_type_double (type, temp, -1,
1616 (double_int_negative_p (temp)
1617 && (TYPE_UNSIGNED (type)
1618 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1619 | TREE_OVERFLOW (arg1));
1620
1621 return t;
1622 }
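
/* Editor's example of the round-toward-zero step: converting the signed
   fixed-point value -2.5, the arithmetic right shift floors TEMP to -3;
   the discarded fractional bits are nonzero, so 1 is added back and the
   resulting INTEGER_CST is -2. */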
1623
 1624 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1625 to another floating point type. */
1626
1627 static tree
1628 fold_convert_const_real_from_real (tree type, const_tree arg1)
1629 {
1630 REAL_VALUE_TYPE value;
1631 tree t;
1632
1633 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1634 t = build_real (type, value);
1635
1636 /* If converting an infinity or NAN to a representation that doesn't
1637 have one, set the overflow bit so that we can produce some kind of
1638 error message at the appropriate point if necessary. It's not the
1639 most user-friendly message, but it's better than nothing. */
1640 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1641 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1642 TREE_OVERFLOW (t) = 1;
1643 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1644 && !MODE_HAS_NANS (TYPE_MODE (type)))
1645 TREE_OVERFLOW (t) = 1;
1646 /* Regular overflow, conversion produced an infinity in a mode that
1647 can't represent them. */
1648 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1649 && REAL_VALUE_ISINF (value)
1650 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1651 TREE_OVERFLOW (t) = 1;
1652 else
1653 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1654 return t;
1655 }
1656
 1657 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1658 to a floating point type. */
1659
1660 static tree
1661 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1662 {
1663 REAL_VALUE_TYPE value;
1664 tree t;
1665
1666 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1667 t = build_real (type, value);
1668
1669 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1670 return t;
1671 }
1672
 1673 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1674 to another fixed-point type. */
1675
1676 static tree
1677 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1678 {
1679 FIXED_VALUE_TYPE value;
1680 tree t;
1681 bool overflow_p;
1682
1683 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1684 TYPE_SATURATING (type));
1685 t = build_fixed (type, value);
1686
1687 /* Propagate overflow flags. */
1688 if (overflow_p | TREE_OVERFLOW (arg1))
1689 TREE_OVERFLOW (t) = 1;
1690 return t;
1691 }
1692
 1693 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1694 to a fixed-point type. */
1695
1696 static tree
1697 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1698 {
1699 FIXED_VALUE_TYPE value;
1700 tree t;
1701 bool overflow_p;
1702
1703 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1704 TREE_INT_CST (arg1),
1705 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1706 TYPE_SATURATING (type));
1707 t = build_fixed (type, value);
1708
1709 /* Propagate overflow flags. */
1710 if (overflow_p | TREE_OVERFLOW (arg1))
1711 TREE_OVERFLOW (t) = 1;
1712 return t;
1713 }
1714
 1715 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1716 to a fixed-point type. */
1717
1718 static tree
1719 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1720 {
1721 FIXED_VALUE_TYPE value;
1722 tree t;
1723 bool overflow_p;
1724
1725 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1726 &TREE_REAL_CST (arg1),
1727 TYPE_SATURATING (type));
1728 t = build_fixed (type, value);
1729
1730 /* Propagate overflow flags. */
1731 if (overflow_p | TREE_OVERFLOW (arg1))
1732 TREE_OVERFLOW (t) = 1;
1733 return t;
1734 }
1735
1736 /* Attempt to fold type conversion operation CODE of expression ARG1 to
 1737    type TYPE.  If no simplification can be done, return NULL_TREE. */
1738
1739 static tree
1740 fold_convert_const (enum tree_code code, tree type, tree arg1)
1741 {
1742 if (TREE_TYPE (arg1) == type)
1743 return arg1;
1744
1745 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1746 || TREE_CODE (type) == OFFSET_TYPE)
1747 {
1748 if (TREE_CODE (arg1) == INTEGER_CST)
1749 return fold_convert_const_int_from_int (type, arg1);
1750 else if (TREE_CODE (arg1) == REAL_CST)
1751 return fold_convert_const_int_from_real (code, type, arg1);
1752 else if (TREE_CODE (arg1) == FIXED_CST)
1753 return fold_convert_const_int_from_fixed (type, arg1);
1754 }
1755 else if (TREE_CODE (type) == REAL_TYPE)
1756 {
1757 if (TREE_CODE (arg1) == INTEGER_CST)
1758 return build_real_from_int_cst (type, arg1);
1759 else if (TREE_CODE (arg1) == REAL_CST)
1760 return fold_convert_const_real_from_real (type, arg1);
1761 else if (TREE_CODE (arg1) == FIXED_CST)
1762 return fold_convert_const_real_from_fixed (type, arg1);
1763 }
1764 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1765 {
1766 if (TREE_CODE (arg1) == FIXED_CST)
1767 return fold_convert_const_fixed_from_fixed (type, arg1);
1768 else if (TREE_CODE (arg1) == INTEGER_CST)
1769 return fold_convert_const_fixed_from_int (type, arg1);
1770 else if (TREE_CODE (arg1) == REAL_CST)
1771 return fold_convert_const_fixed_from_real (type, arg1);
1772 }
1773 return NULL_TREE;
1774 }
1775
1776 /* Construct a vector of zero elements of vector type TYPE. */
1777
1778 static tree
1779 build_zero_vector (tree type)
1780 {
1781 tree t;
1782
1783 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1784 return build_vector_from_val (type, t);
1785 }
1786
 1787 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1788
1789 bool
1790 fold_convertible_p (const_tree type, const_tree arg)
1791 {
1792 tree orig = TREE_TYPE (arg);
1793
1794 if (type == orig)
1795 return true;
1796
1797 if (TREE_CODE (arg) == ERROR_MARK
1798 || TREE_CODE (type) == ERROR_MARK
1799 || TREE_CODE (orig) == ERROR_MARK)
1800 return false;
1801
1802 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1803 return true;
1804
1805 switch (TREE_CODE (type))
1806 {
1807 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1808 case POINTER_TYPE: case REFERENCE_TYPE:
1809 case OFFSET_TYPE:
1810 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1811 || TREE_CODE (orig) == OFFSET_TYPE)
1812 return true;
1813 return (TREE_CODE (orig) == VECTOR_TYPE
1814 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1815
1816 case REAL_TYPE:
1817 case FIXED_POINT_TYPE:
1818 case COMPLEX_TYPE:
1819 case VECTOR_TYPE:
1820 case VOID_TYPE:
1821 return TREE_CODE (type) == TREE_CODE (orig);
1822
1823 default:
1824 return false;
1825 }
1826 }
1827
1828 /* Convert expression ARG to type TYPE. Used by the middle-end for
1829 simple conversions in preference to calling the front-end's convert. */
1830
1831 tree
1832 fold_convert_loc (location_t loc, tree type, tree arg)
1833 {
1834 tree orig = TREE_TYPE (arg);
1835 tree tem;
1836
1837 if (type == orig)
1838 return arg;
1839
1840 if (TREE_CODE (arg) == ERROR_MARK
1841 || TREE_CODE (type) == ERROR_MARK
1842 || TREE_CODE (orig) == ERROR_MARK)
1843 return error_mark_node;
1844
1845 switch (TREE_CODE (type))
1846 {
1847 case POINTER_TYPE:
1848 case REFERENCE_TYPE:
1849 /* Handle conversions between pointers to different address spaces. */
1850 if (POINTER_TYPE_P (orig)
1851 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1852 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1853 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1854 /* fall through */
1855
1856 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1857 case OFFSET_TYPE:
1858 if (TREE_CODE (arg) == INTEGER_CST)
1859 {
1860 tem = fold_convert_const (NOP_EXPR, type, arg);
1861 if (tem != NULL_TREE)
1862 return tem;
1863 }
1864 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1865 || TREE_CODE (orig) == OFFSET_TYPE)
1866 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1867 if (TREE_CODE (orig) == COMPLEX_TYPE)
1868 return fold_convert_loc (loc, type,
1869 fold_build1_loc (loc, REALPART_EXPR,
1870 TREE_TYPE (orig), arg));
1871 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1872 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1873 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1874
1875 case REAL_TYPE:
1876 if (TREE_CODE (arg) == INTEGER_CST)
1877 {
1878 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1879 if (tem != NULL_TREE)
1880 return tem;
1881 }
1882 else if (TREE_CODE (arg) == REAL_CST)
1883 {
1884 tem = fold_convert_const (NOP_EXPR, type, arg);
1885 if (tem != NULL_TREE)
1886 return tem;
1887 }
1888 else if (TREE_CODE (arg) == FIXED_CST)
1889 {
1890 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1891 if (tem != NULL_TREE)
1892 return tem;
1893 }
1894
1895 switch (TREE_CODE (orig))
1896 {
1897 case INTEGER_TYPE:
1898 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1899 case POINTER_TYPE: case REFERENCE_TYPE:
1900 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1901
1902 case REAL_TYPE:
1903 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1904
1905 case FIXED_POINT_TYPE:
1906 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1907
1908 case COMPLEX_TYPE:
1909 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1910 return fold_convert_loc (loc, type, tem);
1911
1912 default:
1913 gcc_unreachable ();
1914 }
1915
1916 case FIXED_POINT_TYPE:
1917 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1918 || TREE_CODE (arg) == REAL_CST)
1919 {
1920 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1921 if (tem != NULL_TREE)
1922 goto fold_convert_exit;
1923 }
1924
1925 switch (TREE_CODE (orig))
1926 {
1927 case FIXED_POINT_TYPE:
1928 case INTEGER_TYPE:
1929 case ENUMERAL_TYPE:
1930 case BOOLEAN_TYPE:
1931 case REAL_TYPE:
1932 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1933
1934 case COMPLEX_TYPE:
1935 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1936 return fold_convert_loc (loc, type, tem);
1937
1938 default:
1939 gcc_unreachable ();
1940 }
1941
1942 case COMPLEX_TYPE:
1943 switch (TREE_CODE (orig))
1944 {
1945 case INTEGER_TYPE:
1946 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1947 case POINTER_TYPE: case REFERENCE_TYPE:
1948 case REAL_TYPE:
1949 case FIXED_POINT_TYPE:
1950 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1951 fold_convert_loc (loc, TREE_TYPE (type), arg),
1952 fold_convert_loc (loc, TREE_TYPE (type),
1953 integer_zero_node));
1954 case COMPLEX_TYPE:
1955 {
1956 tree rpart, ipart;
1957
1958 if (TREE_CODE (arg) == COMPLEX_EXPR)
1959 {
1960 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1961 TREE_OPERAND (arg, 0));
1962 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1963 TREE_OPERAND (arg, 1));
1964 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1965 }
1966
1967 arg = save_expr (arg);
1968 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1969 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1970 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1971 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1972 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1973 }
1974
1975 default:
1976 gcc_unreachable ();
1977 }
1978
1979 case VECTOR_TYPE:
1980 if (integer_zerop (arg))
1981 return build_zero_vector (type);
1982 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1983 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1984 || TREE_CODE (orig) == VECTOR_TYPE);
1985 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
1986
1987 case VOID_TYPE:
1988 tem = fold_ignored_result (arg);
1989 return fold_build1_loc (loc, NOP_EXPR, type, tem);
1990
1991 default:
1992 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1993 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1994 gcc_unreachable ();
1995 }
1996 fold_convert_exit:
1997 protected_set_expr_location_unshare (tem, loc);
1998 return tem;
1999 }
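
/* Worked example for the COMPLEX_TYPE arm above (illustrative): folding a
   conversion of an int I to complex double builds
     COMPLEX_EXPR ((double) I, 0.0)
   i.e. the scalar becomes the real part and the imaginary part is a
   converted zero.  */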
2000 \f
2001 /* Return false if expr can be assumed not to be an lvalue, true
2002 otherwise. */
2003
2004 static bool
2005 maybe_lvalue_p (const_tree x)
2006 {
2007 /* We only need to wrap lvalue tree codes. */
2008 switch (TREE_CODE (x))
2009 {
2010 case VAR_DECL:
2011 case PARM_DECL:
2012 case RESULT_DECL:
2013 case LABEL_DECL:
2014 case FUNCTION_DECL:
2015 case SSA_NAME:
2016
2017 case COMPONENT_REF:
2018 case MEM_REF:
2019 case INDIRECT_REF:
2020 case ARRAY_REF:
2021 case ARRAY_RANGE_REF:
2022 case BIT_FIELD_REF:
2023 case OBJ_TYPE_REF:
2024
2025 case REALPART_EXPR:
2026 case IMAGPART_EXPR:
2027 case PREINCREMENT_EXPR:
2028 case PREDECREMENT_EXPR:
2029 case SAVE_EXPR:
2030 case TRY_CATCH_EXPR:
2031 case WITH_CLEANUP_EXPR:
2032 case COMPOUND_EXPR:
2033 case MODIFY_EXPR:
2034 case TARGET_EXPR:
2035 case COND_EXPR:
2036 case BIND_EXPR:
2037 break;
2038
2039 default:
2040 /* Assume the worst for front-end tree codes. */
2041 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2042 break;
2043 return false;
2044 }
2045
2046 return true;
2047 }
2048
2049 /* Return an expr equal to X but certainly not valid as an lvalue. */
2050
2051 tree
2052 non_lvalue_loc (location_t loc, tree x)
2053 {
2054 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2055 us. */
2056 if (in_gimple_form)
2057 return x;
2058
2059 if (! maybe_lvalue_p (x))
2060 return x;
2061 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2062 }
2063
2064 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2065 Zero means allow extended lvalues. */
2066
2067 int pedantic_lvalues;
2068
2069 /* When pedantic, return an expr equal to X but certainly not valid as a
2070 pedantic lvalue. Otherwise, return X. */
2071
2072 static tree
2073 pedantic_non_lvalue_loc (location_t loc, tree x)
2074 {
2075 if (pedantic_lvalues)
2076 return non_lvalue_loc (loc, x);
2077
2078 return protected_set_expr_location_unshare (x, loc);
2079 }
2080 \f
2081 /* Given a tree comparison code, return the code that is the logical inverse.
2082 It is generally not safe to do this for floating-point comparisons under
2083 trapping math, except for EQ_EXPR and NE_EXPR, so we return ERROR_MARK then. */
2084
2085 enum tree_code
2086 invert_tree_comparison (enum tree_code code, bool honor_nans)
2087 {
2088 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR)
2089 return ERROR_MARK;
2090
2091 switch (code)
2092 {
2093 case EQ_EXPR:
2094 return NE_EXPR;
2095 case NE_EXPR:
2096 return EQ_EXPR;
2097 case GT_EXPR:
2098 return honor_nans ? UNLE_EXPR : LE_EXPR;
2099 case GE_EXPR:
2100 return honor_nans ? UNLT_EXPR : LT_EXPR;
2101 case LT_EXPR:
2102 return honor_nans ? UNGE_EXPR : GE_EXPR;
2103 case LE_EXPR:
2104 return honor_nans ? UNGT_EXPR : GT_EXPR;
2105 case LTGT_EXPR:
2106 return UNEQ_EXPR;
2107 case UNEQ_EXPR:
2108 return LTGT_EXPR;
2109 case UNGT_EXPR:
2110 return LE_EXPR;
2111 case UNGE_EXPR:
2112 return LT_EXPR;
2113 case UNLT_EXPR:
2114 return GE_EXPR;
2115 case UNLE_EXPR:
2116 return GT_EXPR;
2117 case ORDERED_EXPR:
2118 return UNORDERED_EXPR;
2119 case UNORDERED_EXPR:
2120 return ORDERED_EXPR;
2121 default:
2122 gcc_unreachable ();
2123 }
2124 }
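
/* Worked example (illustrative): with NaNs honored, the inverse of LT_EXPR
   is UNGE_EXPR, since !(x < y) also holds when x and y are unordered;
   without NaNs the inverse is plain GE_EXPR.  When NaNs are honored and
   -ftrapping-math is in effect, inverting LT_EXPR would swap a signaling
   comparison for a quiet one, so ERROR_MARK is returned instead.  */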
2125
2126 /* Similar, but return the comparison that results if the operands are
2127 swapped. This is safe for floating-point. */
2128
2129 enum tree_code
2130 swap_tree_comparison (enum tree_code code)
2131 {
2132 switch (code)
2133 {
2134 case EQ_EXPR:
2135 case NE_EXPR:
2136 case ORDERED_EXPR:
2137 case UNORDERED_EXPR:
2138 case LTGT_EXPR:
2139 case UNEQ_EXPR:
2140 return code;
2141 case GT_EXPR:
2142 return LT_EXPR;
2143 case GE_EXPR:
2144 return LE_EXPR;
2145 case LT_EXPR:
2146 return GT_EXPR;
2147 case LE_EXPR:
2148 return GE_EXPR;
2149 case UNGT_EXPR:
2150 return UNLT_EXPR;
2151 case UNGE_EXPR:
2152 return UNLE_EXPR;
2153 case UNLT_EXPR:
2154 return UNGT_EXPR;
2155 case UNLE_EXPR:
2156 return UNGE_EXPR;
2157 default:
2158 gcc_unreachable ();
2159 }
2160 }
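
/* Worked example (illustrative): swapping the operands of x < y gives
   y > x, so LT_EXPR maps to GT_EXPR, while symmetric codes such as
   EQ_EXPR, UNORDERED_EXPR and LTGT_EXPR map to themselves.  Unlike
   inversion, this is safe with NaNs: unordered operands stay unordered
   under a swap.  */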
2161
2162
2163 /* Convert a comparison tree code from an enum tree_code representation
2164 into a compcode bit-based encoding. This function is the inverse of
2165 compcode_to_comparison. */
2166
2167 static enum comparison_code
2168 comparison_to_compcode (enum tree_code code)
2169 {
2170 switch (code)
2171 {
2172 case LT_EXPR:
2173 return COMPCODE_LT;
2174 case EQ_EXPR:
2175 return COMPCODE_EQ;
2176 case LE_EXPR:
2177 return COMPCODE_LE;
2178 case GT_EXPR:
2179 return COMPCODE_GT;
2180 case NE_EXPR:
2181 return COMPCODE_NE;
2182 case GE_EXPR:
2183 return COMPCODE_GE;
2184 case ORDERED_EXPR:
2185 return COMPCODE_ORD;
2186 case UNORDERED_EXPR:
2187 return COMPCODE_UNORD;
2188 case UNLT_EXPR:
2189 return COMPCODE_UNLT;
2190 case UNEQ_EXPR:
2191 return COMPCODE_UNEQ;
2192 case UNLE_EXPR:
2193 return COMPCODE_UNLE;
2194 case UNGT_EXPR:
2195 return COMPCODE_UNGT;
2196 case LTGT_EXPR:
2197 return COMPCODE_LTGT;
2198 case UNGE_EXPR:
2199 return COMPCODE_UNGE;
2200 default:
2201 gcc_unreachable ();
2202 }
2203 }
2204
2205 /* Convert a compcode bit-based encoding of a comparison operator back
2206 to GCC's enum tree_code representation. This function is the
2207 inverse of comparison_to_compcode. */
2208
2209 static enum tree_code
2210 compcode_to_comparison (enum comparison_code code)
2211 {
2212 switch (code)
2213 {
2214 case COMPCODE_LT:
2215 return LT_EXPR;
2216 case COMPCODE_EQ:
2217 return EQ_EXPR;
2218 case COMPCODE_LE:
2219 return LE_EXPR;
2220 case COMPCODE_GT:
2221 return GT_EXPR;
2222 case COMPCODE_NE:
2223 return NE_EXPR;
2224 case COMPCODE_GE:
2225 return GE_EXPR;
2226 case COMPCODE_ORD:
2227 return ORDERED_EXPR;
2228 case COMPCODE_UNORD:
2229 return UNORDERED_EXPR;
2230 case COMPCODE_UNLT:
2231 return UNLT_EXPR;
2232 case COMPCODE_UNEQ:
2233 return UNEQ_EXPR;
2234 case COMPCODE_UNLE:
2235 return UNLE_EXPR;
2236 case COMPCODE_UNGT:
2237 return UNGT_EXPR;
2238 case COMPCODE_LTGT:
2239 return LTGT_EXPR;
2240 case COMPCODE_UNGE:
2241 return UNGE_EXPR;
2242 default:
2243 gcc_unreachable ();
2244 }
2245 }
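
/* Worked example of the compcode encoding (illustrative): the primitive
   bits are LT = 0001, EQ = 0010, GT = 0100 and UNORD = 1000, and every
   other code is a union of them, e.g. LE = LT|EQ = 0011, GE = EQ|GT =
   0110, NE = LT|GT|UNORD = 1101 and ORD = LT|EQ|GT = 0111.  ANDing or
   ORing two encodings therefore computes the conjunction or disjunction
   of the corresponding predicates directly on the bit patterns.  */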
2246
2247 /* Return a tree for the comparison which is the combination of
2248 doing the AND or OR (depending on CODE) of the two operations LCODE
2249 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2250 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2251 if this makes the transformation invalid. */
2252
2253 tree
2254 combine_comparisons (location_t loc,
2255 enum tree_code code, enum tree_code lcode,
2256 enum tree_code rcode, tree truth_type,
2257 tree ll_arg, tree lr_arg)
2258 {
2259 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2260 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2261 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2262 int compcode;
2263
2264 switch (code)
2265 {
2266 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2267 compcode = lcompcode & rcompcode;
2268 break;
2269
2270 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2271 compcode = lcompcode | rcompcode;
2272 break;
2273
2274 default:
2275 return NULL_TREE;
2276 }
2277
2278 if (!honor_nans)
2279 {
2280 /* Eliminate unordered comparisons, as well as LTGT and ORD
2281 which are not used unless the mode has NaNs. */
2282 compcode &= ~COMPCODE_UNORD;
2283 if (compcode == COMPCODE_LTGT)
2284 compcode = COMPCODE_NE;
2285 else if (compcode == COMPCODE_ORD)
2286 compcode = COMPCODE_TRUE;
2287 }
2288 else if (flag_trapping_math)
2289 {
2290 /* Check that the original operation and the optimized ones will trap
2291 under the same condition. */
2292 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2293 && (lcompcode != COMPCODE_EQ)
2294 && (lcompcode != COMPCODE_ORD);
2295 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2296 && (rcompcode != COMPCODE_EQ)
2297 && (rcompcode != COMPCODE_ORD);
2298 bool trap = (compcode & COMPCODE_UNORD) == 0
2299 && (compcode != COMPCODE_EQ)
2300 && (compcode != COMPCODE_ORD);
2301
2302 /* In a short-circuited boolean expression the LHS might be
2303 such that the RHS, if evaluated, will never trap. For
2304 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2305 if neither x nor y is NaN. (This is a mixed blessing: for
2306 example, the expression above will never trap, hence
2307 optimizing it to x < y would be invalid). */
2308 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2309 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2310 rtrap = false;
2311
2312 /* If the comparison was short-circuited, and only the RHS
2313 trapped, we may now generate a spurious trap. */
2314 if (rtrap && !ltrap
2315 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2316 return NULL_TREE;
2317
2318 /* If we changed the conditions that cause a trap, we lose. */
2319 if ((ltrap || rtrap) != trap)
2320 return NULL_TREE;
2321 }
2322
2323 if (compcode == COMPCODE_TRUE)
2324 return constant_boolean_node (true, truth_type);
2325 else if (compcode == COMPCODE_FALSE)
2326 return constant_boolean_node (false, truth_type);
2327 else
2328 {
2329 enum tree_code tcode;
2330
2331 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2332 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2333 }
2334 }
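
/* Worked example (illustrative, on operands where NaNs are not a
   concern): combining (a < b) || (a == b) ORs COMPCODE_LT (0001) with
   COMPCODE_EQ (0010) to get COMPCODE_LE (0011), so the pair folds to
   a <= b.  Combining (a < b) && (a > b) ANDs 0001 with 0100 to get
   COMPCODE_FALSE, which folds to a constant false.  */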
2335 \f
2336 /* Return nonzero if two operands (typically of the same tree node)
2337 are necessarily equal. If either argument has side-effects this
2338 function returns zero. FLAGS modifies behavior as follows:
2339
2340 If OEP_ONLY_CONST is set, only return nonzero for constants.
2341 This function tests whether the operands are indistinguishable;
2342 it does not test whether they are equal using C's == operation.
2343 The distinction is important for IEEE floating point, because
2344 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2345 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2346
2347 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2348 even though it may hold multiple values during a function.
2349 This is because a GCC tree node guarantees that nothing else is
2350 executed between the evaluation of its "operands" (which may often
2351 be evaluated in arbitrary order). Hence if the operands themselves
2352 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2353 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2354 unset means assuming isochronic (or instantaneous) tree equivalence.
2355 Unless comparing arbitrary expression trees, such as from different
2356 statements, this flag can usually be left unset.
2357
2358 If OEP_PURE_SAME is set, then pure functions with identical arguments
2359 are considered the same. It is used when the caller has other ways
2360 to ensure that global memory is unchanged in between. */
2361
2362 int
2363 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2364 {
2365 /* If either is ERROR_MARK, they aren't equal. */
2366 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2367 || TREE_TYPE (arg0) == error_mark_node
2368 || TREE_TYPE (arg1) == error_mark_node)
2369 return 0;
2370
2371 /* Similarly, if either does not have a type (like a released SSA name),
2372 they aren't equal. */
2373 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2374 return 0;
2375
2376 /* Check equality of integer constants before bailing out due to
2377 precision differences. */
2378 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2379 return tree_int_cst_equal (arg0, arg1);
2380
2381 /* If both types don't have the same signedness, then we can't consider
2382 them equal. We must check this before the STRIP_NOPS calls
2383 because they may change the signedness of the arguments. As pointers
2384 strictly don't have a signedness, require either two pointers or
2385 two non-pointers as well. */
2386 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2387 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2388 return 0;
2389
2390 /* We cannot consider pointers to different address space equal. */
2391 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2392 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2393 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2394 return 0;
2395
2396 /* If both types don't have the same precision, then it is not safe
2397 to strip NOPs. */
2398 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2399 return 0;
2400
2401 STRIP_NOPS (arg0);
2402 STRIP_NOPS (arg1);
2403
2404 /* In case both args are comparisons but with different comparison
2405 code, try to swap the comparison operands of one arg to produce
2406 a match and compare that variant. */
2407 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2408 && COMPARISON_CLASS_P (arg0)
2409 && COMPARISON_CLASS_P (arg1))
2410 {
2411 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2412
2413 if (TREE_CODE (arg0) == swap_code)
2414 return operand_equal_p (TREE_OPERAND (arg0, 0),
2415 TREE_OPERAND (arg1, 1), flags)
2416 && operand_equal_p (TREE_OPERAND (arg0, 1),
2417 TREE_OPERAND (arg1, 0), flags);
2418 }
2419
2420 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2421 /* This is needed for conversions and for COMPONENT_REF.
2422 Might as well play it safe and always test this. */
2423 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2424 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2425 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2426 return 0;
2427
2428 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2429 We don't care about side effects in that case because the SAVE_EXPR
2430 takes care of that for us. In all other cases, two expressions are
2431 equal if they have no side effects. If we have two identical
2432 expressions with side effects that should be treated the same due
2433 to the only side effects being identical SAVE_EXPR's, that will
2434 be detected in the recursive calls below.
2435 If we are taking an invariant address of two identical objects
2436 they are necessarily equal as well. */
2437 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2438 && (TREE_CODE (arg0) == SAVE_EXPR
2439 || (flags & OEP_CONSTANT_ADDRESS_OF)
2440 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2441 return 1;
2442
2443 /* Next handle constant cases, those for which we can return 1 even
2444 if ONLY_CONST is set. */
2445 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2446 switch (TREE_CODE (arg0))
2447 {
2448 case INTEGER_CST:
2449 return tree_int_cst_equal (arg0, arg1);
2450
2451 case FIXED_CST:
2452 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2453 TREE_FIXED_CST (arg1));
2454
2455 case REAL_CST:
2456 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2457 TREE_REAL_CST (arg1)))
2458 return 1;
2459
2460
2461 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2462 {
2463 /* If we do not distinguish between signed and unsigned zero,
2464 consider them equal. */
2465 if (real_zerop (arg0) && real_zerop (arg1))
2466 return 1;
2467 }
2468 return 0;
2469
2470 case VECTOR_CST:
2471 {
2472 unsigned i;
2473
2474 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2475 return 0;
2476
2477 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2478 {
2479 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2480 VECTOR_CST_ELT (arg1, i), flags))
2481 return 0;
2482 }
2483 return 1;
2484 }
2485
2486 case COMPLEX_CST:
2487 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2488 flags)
2489 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2490 flags));
2491
2492 case STRING_CST:
2493 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2494 && ! memcmp (TREE_STRING_POINTER (arg0),
2495 TREE_STRING_POINTER (arg1),
2496 TREE_STRING_LENGTH (arg0)));
2497
2498 case ADDR_EXPR:
2499 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2500 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2501 ? OEP_CONSTANT_ADDRESS_OF : 0);
2502 default:
2503 break;
2504 }
2505
2506 if (flags & OEP_ONLY_CONST)
2507 return 0;
2508
2509 /* Define macros to test an operand from arg0 and arg1 for equality and a
2510 variant that allows null and views null as being different from any
2511 non-null value. In the latter case, if either is null, then both
2512 must be; otherwise, do the normal comparison. */
2513 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2514 TREE_OPERAND (arg1, N), flags)
2515
2516 #define OP_SAME_WITH_NULL(N) \
2517 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2518 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2519
2520 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2521 {
2522 case tcc_unary:
2523 /* Two conversions are equal only if signedness and modes match. */
2524 switch (TREE_CODE (arg0))
2525 {
2526 CASE_CONVERT:
2527 case FIX_TRUNC_EXPR:
2528 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2529 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2530 return 0;
2531 break;
2532 default:
2533 break;
2534 }
2535
2536 return OP_SAME (0);
2537
2538
2539 case tcc_comparison:
2540 case tcc_binary:
2541 if (OP_SAME (0) && OP_SAME (1))
2542 return 1;
2543
2544 /* For commutative ops, allow the other order. */
2545 return (commutative_tree_code (TREE_CODE (arg0))
2546 && operand_equal_p (TREE_OPERAND (arg0, 0),
2547 TREE_OPERAND (arg1, 1), flags)
2548 && operand_equal_p (TREE_OPERAND (arg0, 1),
2549 TREE_OPERAND (arg1, 0), flags));
2550
2551 case tcc_reference:
2552 /* If either of the pointer (or reference) expressions we are
2553 dereferencing contain a side effect, these cannot be equal. */
2554 if (TREE_SIDE_EFFECTS (arg0)
2555 || TREE_SIDE_EFFECTS (arg1))
2556 return 0;
2557
2558 switch (TREE_CODE (arg0))
2559 {
2560 case INDIRECT_REF:
2561 case REALPART_EXPR:
2562 case IMAGPART_EXPR:
2563 return OP_SAME (0);
2564
2565 case TARGET_MEM_REF:
2566 /* Require equal extra operands and then fall thru to MEM_REF
2567 handling of the two common operands. */
2568 if (!OP_SAME_WITH_NULL (2)
2569 || !OP_SAME_WITH_NULL (3)
2570 || !OP_SAME_WITH_NULL (4))
2571 return 0;
2572 /* Fallthru. */
2573 case MEM_REF:
2574 /* Require equal access sizes, and similar pointer types.
2575 We can have incomplete types for array references of
2576 variable-sized arrays from the Fortran frontend
2577 though. */
2578 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2579 || (TYPE_SIZE (TREE_TYPE (arg0))
2580 && TYPE_SIZE (TREE_TYPE (arg1))
2581 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2582 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2583 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2584 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2585 && OP_SAME (0) && OP_SAME (1));
2586
2587 case ARRAY_REF:
2588 case ARRAY_RANGE_REF:
2589 /* Operands 2 and 3 may be null.
2590 Compare the array index by value first if it is constant, as the
2591 indices may have different types but the same value here. */
2592 return (OP_SAME (0)
2593 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2594 TREE_OPERAND (arg1, 1))
2595 || OP_SAME (1))
2596 && OP_SAME_WITH_NULL (2)
2597 && OP_SAME_WITH_NULL (3));
2598
2599 case COMPONENT_REF:
2600 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2601 may be NULL when we're called to compare MEM_EXPRs. */
2602 return OP_SAME_WITH_NULL (0)
2603 && OP_SAME (1)
2604 && OP_SAME_WITH_NULL (2);
2605
2606 case BIT_FIELD_REF:
2607 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2608
2609 default:
2610 return 0;
2611 }
2612
2613 case tcc_expression:
2614 switch (TREE_CODE (arg0))
2615 {
2616 case ADDR_EXPR:
2617 case TRUTH_NOT_EXPR:
2618 return OP_SAME (0);
2619
2620 case TRUTH_ANDIF_EXPR:
2621 case TRUTH_ORIF_EXPR:
2622 return OP_SAME (0) && OP_SAME (1);
2623
2624 case FMA_EXPR:
2625 case WIDEN_MULT_PLUS_EXPR:
2626 case WIDEN_MULT_MINUS_EXPR:
2627 if (!OP_SAME (2))
2628 return 0;
2629 /* The multiplication operands are commutative. */
2630 /* FALLTHRU */
2631
2632 case TRUTH_AND_EXPR:
2633 case TRUTH_OR_EXPR:
2634 case TRUTH_XOR_EXPR:
2635 if (OP_SAME (0) && OP_SAME (1))
2636 return 1;
2637
2638 /* Otherwise take into account this is a commutative operation. */
2639 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2640 TREE_OPERAND (arg1, 1), flags)
2641 && operand_equal_p (TREE_OPERAND (arg0, 1),
2642 TREE_OPERAND (arg1, 0), flags));
2643
2644 case COND_EXPR:
2645 case VEC_COND_EXPR:
2646 case DOT_PROD_EXPR:
2647 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2648
2649 default:
2650 return 0;
2651 }
2652
2653 case tcc_vl_exp:
2654 switch (TREE_CODE (arg0))
2655 {
2656 case CALL_EXPR:
2657 /* If the CALL_EXPRs call different functions, then they
2658 clearly cannot be equal. */
2659 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2660 flags))
2661 return 0;
2662
2663 {
2664 unsigned int cef = call_expr_flags (arg0);
2665 if (flags & OEP_PURE_SAME)
2666 cef &= ECF_CONST | ECF_PURE;
2667 else
2668 cef &= ECF_CONST;
2669 if (!cef)
2670 return 0;
2671 }
2672
2673 /* Now see if all the arguments are the same. */
2674 {
2675 const_call_expr_arg_iterator iter0, iter1;
2676 const_tree a0, a1;
2677 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2678 a1 = first_const_call_expr_arg (arg1, &iter1);
2679 a0 && a1;
2680 a0 = next_const_call_expr_arg (&iter0),
2681 a1 = next_const_call_expr_arg (&iter1))
2682 if (! operand_equal_p (a0, a1, flags))
2683 return 0;
2684
2685 /* If we get here and both argument lists are exhausted
2686 then the CALL_EXPRs are equal. */
2687 return ! (a0 || a1);
2688 }
2689 default:
2690 return 0;
2691 }
2692
2693 case tcc_declaration:
2694 /* Consider __builtin_sqrt equal to sqrt. */
2695 return (TREE_CODE (arg0) == FUNCTION_DECL
2696 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2697 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2698 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2699
2700 default:
2701 return 0;
2702 }
2703
2704 #undef OP_SAME
2705 #undef OP_SAME_WITH_NULL
2706 }
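
/* Worked examples (illustrative): operand_equal_p treats a + b and b + a
   as equal through the commutative tcc_binary case above, and treats the
   REAL_CSTs -0.0 and 0.0 as equal only when the mode does not honor
   signed zeros.  Two occurrences of an expression with side effects such
   as i++ are never equal unless they are literally the same SAVE_EXPR.  */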
2707 \f
2708 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2709 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2710
2711 When in doubt, return 0. */
2712
2713 static int
2714 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2715 {
2716 int unsignedp1, unsignedpo;
2717 tree primarg0, primarg1, primother;
2718 unsigned int correct_width;
2719
2720 if (operand_equal_p (arg0, arg1, 0))
2721 return 1;
2722
2723 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2724 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2725 return 0;
2726
2727 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2728 and see if the inner values are the same. This removes any
2729 signedness comparison, which doesn't matter here. */
2730 primarg0 = arg0, primarg1 = arg1;
2731 STRIP_NOPS (primarg0);
2732 STRIP_NOPS (primarg1);
2733 if (operand_equal_p (primarg0, primarg1, 0))
2734 return 1;
2735
2736 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2737 actual comparison operand, ARG0.
2738
2739 First throw away any conversions to wider types
2740 already present in the operands. */
2741
2742 primarg1 = get_narrower (arg1, &unsignedp1);
2743 primother = get_narrower (other, &unsignedpo);
2744
2745 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2746 if (unsignedp1 == unsignedpo
2747 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2748 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2749 {
2750 tree type = TREE_TYPE (arg0);
2751
2752 /* Make sure shorter operand is extended the right way
2753 to match the longer operand. */
2754 primarg1 = fold_convert (signed_or_unsigned_type_for
2755 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2756
2757 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2758 return 1;
2759 }
2760
2761 return 0;
2762 }
2763 \f
2764 /* See if ARG is an expression that is either a comparison or is performing
2765 arithmetic on comparisons. The comparisons must only be comparing
2766 two different values, which will be stored in *CVAL1 and *CVAL2; if
2767 they are nonzero it means that some operands have already been found.
2768 No variables may be used anywhere else in the expression except in the
2769 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2770 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2771
2772 If this is true, return 1. Otherwise, return zero. */
2773
2774 static int
2775 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2776 {
2777 enum tree_code code = TREE_CODE (arg);
2778 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2779
2780 /* We can handle some of the tcc_expression cases here. */
2781 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2782 tclass = tcc_unary;
2783 else if (tclass == tcc_expression
2784 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2785 || code == COMPOUND_EXPR))
2786 tclass = tcc_binary;
2787
2788 else if (tclass == tcc_expression && code == SAVE_EXPR
2789 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2790 {
2791 /* If we've already found a CVAL1 or CVAL2, this expression is
2792 too complex to handle. */
2793 if (*cval1 || *cval2)
2794 return 0;
2795
2796 tclass = tcc_unary;
2797 *save_p = 1;
2798 }
2799
2800 switch (tclass)
2801 {
2802 case tcc_unary:
2803 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2804
2805 case tcc_binary:
2806 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2807 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2808 cval1, cval2, save_p));
2809
2810 case tcc_constant:
2811 return 1;
2812
2813 case tcc_expression:
2814 if (code == COND_EXPR)
2815 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2816 cval1, cval2, save_p)
2817 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2818 cval1, cval2, save_p)
2819 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2820 cval1, cval2, save_p));
2821 return 0;
2822
2823 case tcc_comparison:
2824 /* First see if we can handle the first operand, then the second. For
2825 the second operand, we know *CVAL1 can't be zero. It must be that
2826 one side of the comparison is each of the values; test for the
2827 case where this isn't true by failing if the two operands
2828 are the same. */
2829
2830 if (operand_equal_p (TREE_OPERAND (arg, 0),
2831 TREE_OPERAND (arg, 1), 0))
2832 return 0;
2833
2834 if (*cval1 == 0)
2835 *cval1 = TREE_OPERAND (arg, 0);
2836 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2837 ;
2838 else if (*cval2 == 0)
2839 *cval2 = TREE_OPERAND (arg, 0);
2840 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2841 ;
2842 else
2843 return 0;
2844
2845 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2846 ;
2847 else if (*cval2 == 0)
2848 *cval2 = TREE_OPERAND (arg, 1);
2849 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2850 ;
2851 else
2852 return 0;
2853
2854 return 1;
2855
2856 default:
2857 return 0;
2858 }
2859 }
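
/* Worked example (illustrative): for ARG = (a == b || a < b) the
   comparisons mention only two values, so the function returns 1 with
   *CVAL1 == a and *CVAL2 == b.  For (a < b || c < d) it returns 0,
   since that would require more than two distinct values.  */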
2860 \f
2861 /* ARG is a tree that is known to contain just arithmetic operations and
2862 comparisons. Evaluate the operations in the tree substituting NEW0 for
2863 any occurrence of OLD0 as an operand of a comparison and likewise for
2864 NEW1 and OLD1. */
2865
2866 static tree
2867 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2868 tree old1, tree new1)
2869 {
2870 tree type = TREE_TYPE (arg);
2871 enum tree_code code = TREE_CODE (arg);
2872 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2873
2874 /* We can handle some of the tcc_expression cases here. */
2875 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2876 tclass = tcc_unary;
2877 else if (tclass == tcc_expression
2878 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2879 tclass = tcc_binary;
2880
2881 switch (tclass)
2882 {
2883 case tcc_unary:
2884 return fold_build1_loc (loc, code, type,
2885 eval_subst (loc, TREE_OPERAND (arg, 0),
2886 old0, new0, old1, new1));
2887
2888 case tcc_binary:
2889 return fold_build2_loc (loc, code, type,
2890 eval_subst (loc, TREE_OPERAND (arg, 0),
2891 old0, new0, old1, new1),
2892 eval_subst (loc, TREE_OPERAND (arg, 1),
2893 old0, new0, old1, new1));
2894
2895 case tcc_expression:
2896 switch (code)
2897 {
2898 case SAVE_EXPR:
2899 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2900 old1, new1);
2901
2902 case COMPOUND_EXPR:
2903 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2904 old1, new1);
2905
2906 case COND_EXPR:
2907 return fold_build3_loc (loc, code, type,
2908 eval_subst (loc, TREE_OPERAND (arg, 0),
2909 old0, new0, old1, new1),
2910 eval_subst (loc, TREE_OPERAND (arg, 1),
2911 old0, new0, old1, new1),
2912 eval_subst (loc, TREE_OPERAND (arg, 2),
2913 old0, new0, old1, new1));
2914 default:
2915 break;
2916 }
2917 /* Fall through - ??? */
2918
2919 case tcc_comparison:
2920 {
2921 tree arg0 = TREE_OPERAND (arg, 0);
2922 tree arg1 = TREE_OPERAND (arg, 1);
2923
2924 /* We need to check both for exact equality and tree equality. The
2925 former will be true if the operand has a side-effect. In that
2926 case, we know the operand occurred exactly once. */
2927
2928 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2929 arg0 = new0;
2930 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2931 arg0 = new1;
2932
2933 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2934 arg1 = new0;
2935 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2936 arg1 = new1;
2937
2938 return fold_build2_loc (loc, code, type, arg0, arg1);
2939 }
2940
2941 default:
2942 return arg;
2943 }
2944 }
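
/* Worked example (illustrative): eval_subst on ARG = (x < y && x != y)
   with OLD0 = x, NEW0 = a, OLD1 = y, NEW1 = b rebuilds the tree as
   (a < b && a != b).  Only operands of the comparisons are substituted,
   which is why ARG must contain nothing but arithmetic on comparisons.  */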
2945 \f
2946 /* Return a tree for the case when the result of an expression is RESULT
2947 converted to TYPE and OMITTED was previously an operand of the expression
2948 but is now not needed (e.g., we folded OMITTED * 0).
2949
2950 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2951 the conversion of RESULT to TYPE. */
2952
2953 tree
2954 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2955 {
2956 tree t = fold_convert_loc (loc, type, result);
2957
2958 /* If the resulting operand is an empty statement, just return the omitted
2959 statement cast to void. */
2960 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2961 return build1_loc (loc, NOP_EXPR, void_type_node,
2962 fold_ignored_result (omitted));
2963
2964 if (TREE_SIDE_EFFECTS (omitted))
2965 return build2_loc (loc, COMPOUND_EXPR, type,
2966 fold_ignored_result (omitted), t);
2967
2968 return non_lvalue_loc (loc, t);
2969 }
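
/* Worked example (illustrative): when X * 0 folds to 0 but X is i++, the
   increment must still happen, so omit_one_operand_loc returns
   COMPOUND_EXPR (i++, 0): the omitted operand is evaluated for its side
   effect and the constant is the value of the whole expression.  */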
2970
2971 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2972
2973 static tree
2974 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2975 tree omitted)
2976 {
2977 tree t = fold_convert_loc (loc, type, result);
2978
2979 /* If the resulting operand is an empty statement, just return the omitted
2980 statement cast to void. */
2981 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2982 return build1_loc (loc, NOP_EXPR, void_type_node,
2983 fold_ignored_result (omitted));
2984
2985 if (TREE_SIDE_EFFECTS (omitted))
2986 return build2_loc (loc, COMPOUND_EXPR, type,
2987 fold_ignored_result (omitted), t);
2988
2989 return pedantic_non_lvalue_loc (loc, t);
2990 }
2991
2992 /* Return a tree for the case when the result of an expression is RESULT
2993 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2994 of the expression but are now not needed.
2995
2996 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2997 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2998 evaluated before OMITTED2. Otherwise, if neither has side effects,
2999 just do the conversion of RESULT to TYPE. */
3000
3001 tree
3002 omit_two_operands_loc (location_t loc, tree type, tree result,
3003 tree omitted1, tree omitted2)
3004 {
3005 tree t = fold_convert_loc (loc, type, result);
3006
3007 if (TREE_SIDE_EFFECTS (omitted2))
3008 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3009 if (TREE_SIDE_EFFECTS (omitted1))
3010 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3011
3012 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3013 }
3014
3015 \f
3016 /* Return a simplified tree node for the truth-negation of ARG. This
3017 never alters ARG itself. We assume that ARG is an operation that
3018 returns a truth value (0 or 1).
3019
3020 FIXME: one would think we would fold the result, but it causes
3021 problems with the dominator optimizer. */
3022
3023 tree
3024 fold_truth_not_expr (location_t loc, tree arg)
3025 {
3026 tree type = TREE_TYPE (arg);
3027 enum tree_code code = TREE_CODE (arg);
3028 location_t loc1, loc2;
3029
3030 /* If this is a comparison, we can simply invert it, except for
3031 floating-point non-equality comparisons, in which case we return
3032 NULL_TREE and let the caller enclose a TRUTH_NOT_EXPR around what we have. */
3033
3034 if (TREE_CODE_CLASS (code) == tcc_comparison)
3035 {
3036 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3037 if (FLOAT_TYPE_P (op_type)
3038 && flag_trapping_math
3039 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3040 && code != NE_EXPR && code != EQ_EXPR)
3041 return NULL_TREE;
3042
3043 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3044 if (code == ERROR_MARK)
3045 return NULL_TREE;
3046
3047 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3048 TREE_OPERAND (arg, 1));
3049 }
3050
3051 switch (code)
3052 {
3053 case INTEGER_CST:
3054 return constant_boolean_node (integer_zerop (arg), type);
3055
3056 case TRUTH_AND_EXPR:
3057 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3058 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3059 return build2_loc (loc, TRUTH_OR_EXPR, type,
3060 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3061 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3062
3063 case TRUTH_OR_EXPR:
3064 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3065 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3066 return build2_loc (loc, TRUTH_AND_EXPR, type,
3067 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3068 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3069
3070 case TRUTH_XOR_EXPR:
3071 /* Here we can invert either operand. We invert the first operand
3072 unless the second operand is a TRUTH_NOT_EXPR in which case our
3073 result is the XOR of the first operand with the inside of the
3074 negation of the second operand. */
3075
3076 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3077 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3078 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3079 else
3080 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3081 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3082 TREE_OPERAND (arg, 1));
3083
3084 case TRUTH_ANDIF_EXPR:
3085 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3086 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3087 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3088 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3089 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3090
3091 case TRUTH_ORIF_EXPR:
3092 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3093 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3094 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3095 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3096 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3097
3098 case TRUTH_NOT_EXPR:
3099 return TREE_OPERAND (arg, 0);
3100
3101 case COND_EXPR:
3102 {
3103 tree arg1 = TREE_OPERAND (arg, 1);
3104 tree arg2 = TREE_OPERAND (arg, 2);
3105
3106 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3107 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3108
3109 /* A COND_EXPR may have a throw as one operand, which
3110 then has void type. Just leave void operands
3111 as they are. */
3112 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3113 VOID_TYPE_P (TREE_TYPE (arg1))
3114 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3115 VOID_TYPE_P (TREE_TYPE (arg2))
3116 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3117 }
3118
3119 case COMPOUND_EXPR:
3120 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3121 return build2_loc (loc, COMPOUND_EXPR, type,
3122 TREE_OPERAND (arg, 0),
3123 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3124
3125 case NON_LVALUE_EXPR:
3126 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3127 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3128
3129 CASE_CONVERT:
3130 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3131 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3132
3133 /* ... fall through ... */
3134
3135 case FLOAT_EXPR:
3136 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3137 return build1_loc (loc, TREE_CODE (arg), type,
3138 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3139
3140 case BIT_AND_EXPR:
3141 if (!integer_onep (TREE_OPERAND (arg, 1)))
3142 return NULL_TREE;
3143 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3144
3145 case SAVE_EXPR:
3146 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3147
3148 case CLEANUP_POINT_EXPR:
3149 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3150 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3151 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3152
3153 default:
3154 return NULL_TREE;
3155 }
3156 }
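
/* Worked examples (illustrative): the TRUTH_AND_EXPR arm applies De
   Morgan's law, rewriting !(a && b) as !a || !b, and the comparison arm
   turns !(x == y) into x != y.  For a trapping float comparison such as
   !(x < y) the function returns NULL_TREE, and invert_truthvalue_loc
   below falls back to wrapping a TRUTH_NOT_EXPR.  */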
3157
3158 /* Return a simplified tree node for the truth-negation of ARG. This
3159 never alters ARG itself. We assume that ARG is an operation that
3160 returns a truth value (0 or 1).
3161
3162 FIXME: one would think we would fold the result, but it causes
3163 problems with the dominator optimizer. */
3164
3165 tree
3166 invert_truthvalue_loc (location_t loc, tree arg)
3167 {
3168 tree tem;
3169
3170 if (TREE_CODE (arg) == ERROR_MARK)
3171 return arg;
3172
3173 tem = fold_truth_not_expr (loc, arg);
3174 if (!tem)
3175 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3176
3177 return tem;
3178 }
3179
3180 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3181 operands are another bit-wise operation with a common input. If so,
3182 distribute the bit operations to save an operation and possibly two if
3183 constants are involved. For example, convert
3184 (A | B) & (A | C) into A | (B & C)
3185 Further simplification will occur if B and C are constants.
3186
3187 If this optimization cannot be done, 0 will be returned. */
3188
3189 static tree
3190 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3191 tree arg0, tree arg1)
3192 {
3193 tree common;
3194 tree left, right;
3195
3196 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3197 || TREE_CODE (arg0) == code
3198 || (TREE_CODE (arg0) != BIT_AND_EXPR
3199 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3200 return 0;
3201
3202 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3203 {
3204 common = TREE_OPERAND (arg0, 0);
3205 left = TREE_OPERAND (arg0, 1);
3206 right = TREE_OPERAND (arg1, 1);
3207 }
3208 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3209 {
3210 common = TREE_OPERAND (arg0, 0);
3211 left = TREE_OPERAND (arg0, 1);
3212 right = TREE_OPERAND (arg1, 0);
3213 }
3214 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3215 {
3216 common = TREE_OPERAND (arg0, 1);
3217 left = TREE_OPERAND (arg0, 0);
3218 right = TREE_OPERAND (arg1, 1);
3219 }
3220 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3221 {
3222 common = TREE_OPERAND (arg0, 1);
3223 left = TREE_OPERAND (arg0, 0);
3224 right = TREE_OPERAND (arg1, 0);
3225 }
3226 else
3227 return 0;
3228
3229 common = fold_convert_loc (loc, type, common);
3230 left = fold_convert_loc (loc, type, left);
3231 right = fold_convert_loc (loc, type, right);
3232 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3233 fold_build2_loc (loc, code, type, left, right));
3234 }
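
/* Worked example (illustrative): for (x | 3) & (x | 5) the common operand
   is x, so the result is x | (3 & 5), which constant folding reduces to
   x | 1 -- one bit operation at run time instead of three.  */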
3235
3236 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3237 with code CODE. This optimization is unsafe because it can change rounding. */
3238 static tree
3239 distribute_real_division (location_t loc, enum tree_code code, tree type,
3240 tree arg0, tree arg1)
3241 {
3242 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3243 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3244
3245 /* (A / C) +- (B / C) -> (A +- B) / C. */
3246 if (mul0 == mul1
3247 && operand_equal_p (TREE_OPERAND (arg0, 1),
3248 TREE_OPERAND (arg1, 1), 0))
3249 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3250 fold_build2_loc (loc, code, type,
3251 TREE_OPERAND (arg0, 0),
3252 TREE_OPERAND (arg1, 0)),
3253 TREE_OPERAND (arg0, 1));
3254
3255 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3256 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3257 TREE_OPERAND (arg1, 0), 0)
3258 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3259 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3260 {
3261 REAL_VALUE_TYPE r0, r1;
3262 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3263 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3264 if (!mul0)
3265 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3266 if (!mul1)
3267 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3268 real_arithmetic (&r0, code, &r0, &r1);
3269 return fold_build2_loc (loc, MULT_EXPR, type,
3270 TREE_OPERAND (arg0, 0),
3271 build_real (type, r0));
3272 }
3273
3274 return NULL_TREE;
3275 }
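
/* Worked example (illustrative): x / 2.0 + x / 4.0 matches the second
   pattern above and becomes x * (1/2.0 + 1/4.0), i.e. x * 0.75, trading
   two divisions for one multiplication.  The rewrite can change rounding,
   which is why it is reserved for unsafe math.  */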
3276 \f
3277 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3278 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3279
3280 static tree
3281 make_bit_field_ref (location_t loc, tree inner, tree type,
3282 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3283 {
3284 tree result, bftype;
3285
3286 if (bitpos == 0)
3287 {
3288 tree size = TYPE_SIZE (TREE_TYPE (inner));
3289 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3290 || POINTER_TYPE_P (TREE_TYPE (inner)))
3291 && host_integerp (size, 0)
3292 && tree_low_cst (size, 0) == bitsize)
3293 return fold_convert_loc (loc, type, inner);
3294 }
3295
3296 bftype = type;
3297 if (TYPE_PRECISION (bftype) != bitsize
3298 || TYPE_UNSIGNED (bftype) == !unsignedp)
3299 bftype = build_nonstandard_integer_type (bitsize, 0);
3300
3301 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3302 size_int (bitsize), bitsize_int (bitpos));
3303
3304 if (bftype != type)
3305 result = fold_convert_loc (loc, type, result);
3306
3307 return result;
3308 }
3309
3310 /* Optimize a bit-field compare.
3311
3312 There are two cases: First is a compare against a constant and the
3313 second is a comparison of two items where the fields are at the same
3314 bit position relative to the start of a chunk (byte, halfword, word)
3315 large enough to contain it. In these cases we can avoid the shift
3316 implicit in bitfield extractions.
3317
3318 For constants, we emit a compare of the shifted constant with the
3319 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3320 compared. For two fields at the same position, we do the ANDs with the
3321 similar mask and compare the result of the ANDs.
3322
3323 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3324 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3325 are the left and right operands of the comparison, respectively.
3326
3327 If the optimization described above can be done, we return the resulting
3328 tree. Otherwise we return zero. */
3329
3330 static tree
3331 optimize_bit_field_compare (location_t loc, enum tree_code code,
3332 tree compare_type, tree lhs, tree rhs)
3333 {
3334 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3335 tree type = TREE_TYPE (lhs);
3336 tree signed_type, unsigned_type;
3337 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3338 enum machine_mode lmode, rmode, nmode;
3339 int lunsignedp, runsignedp;
3340 int lvolatilep = 0, rvolatilep = 0;
3341 tree linner, rinner = NULL_TREE;
3342 tree mask;
3343 tree offset;
3344
3345 /* Get all the information about the extractions being done. If the bit size
3346 is the same as the size of the underlying object, we aren't doing an
3347 extraction at all and so can do nothing. We also don't want to
3348 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3349 then will no longer be able to replace it. */
3350 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3351 &lunsignedp, &lvolatilep, false);
3352 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3353 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3354 return 0;
3355
3356 if (!const_p)
3357 {
3358 /* If this is not a constant, we can only do something if bit positions,
3359 sizes, and signedness are the same. */
3360 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3361 &runsignedp, &rvolatilep, false);
3362
3363 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3364 || lunsignedp != runsignedp || offset != 0
3365 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3366 return 0;
3367 }
3368
3369 /* See if we can find a mode to refer to this field. We should be able to,
3370 but fail if we can't. */
3371 if (lvolatilep
3372 && GET_MODE_BITSIZE (lmode) > 0
3373 && flag_strict_volatile_bitfields > 0)
3374 nmode = lmode;
3375 else
3376 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3377 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3378 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3379 TYPE_ALIGN (TREE_TYPE (rinner))),
3380 word_mode, lvolatilep || rvolatilep);
3381 if (nmode == VOIDmode)
3382 return 0;
3383
3384 /* Set signed and unsigned types of the precision of this mode for the
3385 shifts below. */
3386 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3387 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3388
3389 /* Compute the bit position and size for the new reference and our offset
3390 within it. If the new reference is the same size as the original, we
3391 won't optimize anything, so return zero. */
3392 nbitsize = GET_MODE_BITSIZE (nmode);
3393 nbitpos = lbitpos & ~ (nbitsize - 1);
3394 lbitpos -= nbitpos;
3395 if (nbitsize == lbitsize)
3396 return 0;
3397
3398 if (BYTES_BIG_ENDIAN)
3399 lbitpos = nbitsize - lbitsize - lbitpos;
3400
3401 /* Make the mask to be used against the extracted field. */
3402 mask = build_int_cst_type (unsigned_type, -1);
3403 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3404 mask = const_binop (RSHIFT_EXPR, mask,
3405 size_int (nbitsize - lbitsize - lbitpos));
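
  /* Worked example (illustrative): with nbitsize == 8, lbitsize == 3 and
     lbitpos == 2, the mask starts as 0xff, becomes 0xe0 after the left
     shift by 5, and 0x1c after the right shift by 3 -- exactly three one
     bits covering the field within the chunk.  */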
3406
3407 if (! const_p)
3408 /* If not comparing with constant, just rework the comparison
3409 and return. */
3410 return fold_build2_loc (loc, code, compare_type,
3411 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3412 make_bit_field_ref (loc, linner,
3413 unsigned_type,
3414 nbitsize, nbitpos,
3415 1),
3416 mask),
3417 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3418 make_bit_field_ref (loc, rinner,
3419 unsigned_type,
3420 nbitsize, nbitpos,
3421 1),
3422 mask));
3423
3424 /* Otherwise, we are handling the constant case. See if the constant is too
3425 big for the field. Warn and return a tree for 0 (false) if so. We do
3426 this not only for its own sake, but to avoid having to test for this
3427 error case below. If we didn't, we might generate wrong code.
3428
3429 For unsigned fields, the constant shifted right by the field length should
3430 be all zero. For signed fields, the high-order bits should agree with
3431 the sign bit. */
3432
3433 if (lunsignedp)
3434 {
3435 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3436 fold_convert_loc (loc,
3437 unsigned_type, rhs),
3438 size_int (lbitsize))))
3439 {
3440 warning (0, "comparison is always %d due to width of bit-field",
3441 code == NE_EXPR);
3442 return constant_boolean_node (code == NE_EXPR, compare_type);
3443 }
3444 }
3445 else
3446 {
3447 tree tem = const_binop (RSHIFT_EXPR,
3448 fold_convert_loc (loc, signed_type, rhs),
3449 size_int (lbitsize - 1));
3450 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3451 {
3452 warning (0, "comparison is always %d due to width of bit-field",
3453 code == NE_EXPR);
3454 return constant_boolean_node (code == NE_EXPR, compare_type);
3455 }
3456 }
3457
3458 /* Single-bit compares should always be against zero. */
3459 if (lbitsize == 1 && ! integer_zerop (rhs))
3460 {
3461 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3462 rhs = build_int_cst (type, 0);
3463 }
3464
3465 /* Make a new bitfield reference, shift the constant over the
3466 appropriate number of bits and mask it with the computed mask
3467 (in case this was a signed field). If we changed it, make a new one. */
3468 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3469 if (lvolatilep)
3470 {
3471 TREE_SIDE_EFFECTS (lhs) = 1;
3472 TREE_THIS_VOLATILE (lhs) = 1;
3473 }
3474
3475 rhs = const_binop (BIT_AND_EXPR,
3476 const_binop (LSHIFT_EXPR,
3477 fold_convert_loc (loc, unsigned_type, rhs),
3478 size_int (lbitpos)),
3479 mask);
3480
3481 lhs = build2_loc (loc, code, compare_type,
3482 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3483 return lhs;
3484 }
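
/* Worked example (illustrative): given
     struct { unsigned f : 3; } s;
   the comparison s.f == 5 is rewritten to fetch the containing chunk once
   and test (chunk & mask) == ((5 << lbitpos) & mask), avoiding the shift
   a plain bit-field extraction would need.  A constant that cannot fit,
   such as s.f == 9, triggers the "comparison is always" warning above and
   folds to a constant result.  */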
3485 \f
3486 /* Subroutine for fold_truth_andor_1: decode a field reference.
3487
3488 If EXP is a comparison reference, we return the innermost reference.
3489
3490 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3491 set to the starting bit number.
3492
3493 If the innermost field can be completely contained in a mode-sized
3494 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3495
3496 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3497 otherwise it is not changed.
3498
3499 *PUNSIGNEDP is set to the signedness of the field.
3500
3501 *PMASK is set to the mask used. This is either contained in a
3502 BIT_AND_EXPR or derived from the width of the field.
3503
3504 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3505
3506 Return 0 if this is not a component reference or is one that we can't
3507 do anything with. */
3508
3509 static tree
3510 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3511 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3512 int *punsignedp, int *pvolatilep,
3513 tree *pmask, tree *pand_mask)
3514 {
3515 tree outer_type = 0;
3516 tree and_mask = 0;
3517 tree mask, inner, offset;
3518 tree unsigned_type;
3519 unsigned int precision;
3520
3521 /* All the optimizations using this function assume integer fields.
3522 There are problems with FP fields since the type_for_size call
3523 below can fail for, e.g., XFmode. */
3524 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3525 return 0;
3526
3527 /* We are interested in the bare arrangement of bits, so strip everything
3528 that doesn't affect the machine mode. However, record the type of the
3529 outermost expression if it may matter below. */
3530 if (CONVERT_EXPR_P (exp)
3531 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3532 outer_type = TREE_TYPE (exp);
3533 STRIP_NOPS (exp);
3534
3535 if (TREE_CODE (exp) == BIT_AND_EXPR)
3536 {
3537 and_mask = TREE_OPERAND (exp, 1);
3538 exp = TREE_OPERAND (exp, 0);
3539 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3540 if (TREE_CODE (and_mask) != INTEGER_CST)
3541 return 0;
3542 }
3543
3544 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3545 punsignedp, pvolatilep, false);
3546 if ((inner == exp && and_mask == 0)
3547 || *pbitsize < 0 || offset != 0
3548 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3549 return 0;
3550
3551 /* If the number of bits in the reference is the same as the bitsize of
3552 the outer type, then the outer type gives the signedness. Otherwise
3553 (in case of a small bitfield) the signedness is unchanged. */
3554 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3555 *punsignedp = TYPE_UNSIGNED (outer_type);
3556
3557 /* Compute the mask to access the bitfield. */
3558 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3559 precision = TYPE_PRECISION (unsigned_type);
3560
3561 mask = build_int_cst_type (unsigned_type, -1);
3562
3563 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3564 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3565
3566 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3567 if (and_mask != 0)
3568 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3569 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3570
3571 *pmask = mask;
3572 *pand_mask = and_mask;
3573 return inner;
3574 }
3575
3576 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3577 bit positions. */
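/* For example, in the case where SIZE equals the precision of MASK's
   type (say both are 8), the two shifts below are by zero bits, TMASK
   stays -1, and an all-ones MASK of 0xff compares equal, so the
   function returns nonzero.  */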
3578
3579 static int
3580 all_ones_mask_p (const_tree mask, int size)
3581 {
3582 tree type = TREE_TYPE (mask);
3583 unsigned int precision = TYPE_PRECISION (type);
3584 tree tmask;
3585
3586 tmask = build_int_cst_type (signed_type_for (type), -1);
3587
3588 return
3589 tree_int_cst_equal (mask,
3590 const_binop (RSHIFT_EXPR,
3591 const_binop (LSHIFT_EXPR, tmask,
3592 size_int (precision - size)),
3593 size_int (precision - size)));
3594 }
3595
3596 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3597 represents the sign bit of EXP's type. If EXP represents a sign
3598 or zero extension, also test VAL against the unextended type.
3599 The return value is the (sub)expression whose sign bit is VAL,
3600 or NULL_TREE otherwise. */
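/* For example, with a 16-bit integral type, a VAL whose bits within the
   precision are 0x8000 is recognized as the sign bit and EXP itself is
   returned.  */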
3601
3602 static tree
3603 sign_bit_p (tree exp, const_tree val)
3604 {
3605 unsigned HOST_WIDE_INT mask_lo, lo;
3606 HOST_WIDE_INT mask_hi, hi;
3607 int width;
3608 tree t;
3609
3610 /* Tree EXP must have an integral type. */
3611 t = TREE_TYPE (exp);
3612 if (! INTEGRAL_TYPE_P (t))
3613 return NULL_TREE;
3614
3615 /* Tree VAL must be an integer constant. */
3616 if (TREE_CODE (val) != INTEGER_CST
3617 || TREE_OVERFLOW (val))
3618 return NULL_TREE;
3619
3620 width = TYPE_PRECISION (t);
3621 if (width > HOST_BITS_PER_WIDE_INT)
3622 {
3623 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3624 lo = 0;
3625
3626 mask_hi = ((unsigned HOST_WIDE_INT) -1
3627 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3628 mask_lo = -1;
3629 }
3630 else
3631 {
3632 hi = 0;
3633 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3634
3635 mask_hi = 0;
3636 mask_lo = ((unsigned HOST_WIDE_INT) -1
3637 >> (HOST_BITS_PER_WIDE_INT - width));
3638 }
3639
3640 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3641 treat VAL as if it were unsigned. */
3642 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3643 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3644 return exp;
3645
3646 /* Handle extension from a narrower type. */
3647 if (TREE_CODE (exp) == NOP_EXPR
3648 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3649 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3650
3651 return NULL_TREE;
3652 }
3653
3654 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3655 to be evaluated unconditionally. */
3656
3657 static int
3658 simple_operand_p (const_tree exp)
3659 {
3660 /* Strip any conversions that don't change the machine mode. */
3661 STRIP_NOPS (exp);
3662
3663 return (CONSTANT_CLASS_P (exp)
3664 || TREE_CODE (exp) == SSA_NAME
3665 || (DECL_P (exp)
3666 && ! TREE_ADDRESSABLE (exp)
3667 && ! TREE_THIS_VOLATILE (exp)
3668 && ! DECL_NONLOCAL (exp)
3669 /* Don't regard global variables as simple. They may be
3670 allocated in ways unknown to the compiler (shared memory,
3671 #pragma weak, etc). */
3672 && ! TREE_PUBLIC (exp)
3673 && ! DECL_EXTERNAL (exp)
3674 /* Loading a static variable is unduly expensive, but global
3675 registers aren't expensive. */
3676 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3677 }
3678
3679 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3680 to be evaluated unconditionally.
3681 In addition to simple_operand_p, we assume that comparisons, conversions,
3682 and logic-not operations are simple if their operands are simple, too. */
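/* For example, `!((int) x < y)' is simple here when X and Y are local,
   non-volatile, non-addressable scalars: the logic-not and the
   conversion are looked through, and both comparison operands satisfy
   simple_operand_p.  */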
3683
3684 static bool
3685 simple_operand_p_2 (tree exp)
3686 {
3687 enum tree_code code;
3688
3689 if (TREE_SIDE_EFFECTS (exp)
3690 || tree_could_trap_p (exp))
3691 return false;
3692
3693 while (CONVERT_EXPR_P (exp))
3694 exp = TREE_OPERAND (exp, 0);
3695
3696 code = TREE_CODE (exp);
3697
3698 if (TREE_CODE_CLASS (code) == tcc_comparison)
3699 return (simple_operand_p (TREE_OPERAND (exp, 0))
3700 && simple_operand_p (TREE_OPERAND (exp, 1)));
3701
3702 if (code == TRUTH_NOT_EXPR)
3703 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3704
3705 return simple_operand_p (exp);
3706 }
3707
3708 \f
3709 /* The following functions are subroutines to fold_range_test and allow it to
3710 try to change a logical combination of comparisons into a range test.
3711
3712 For example, both
3713 X == 2 || X == 3 || X == 4 || X == 5
3714 and
3715 X >= 2 && X <= 5
3716 are converted to
3717 (unsigned) (X - 2) <= 3
3718
3719 We describe each set of comparisons as being either inside or outside
3720 a range, using a variable named like IN_P, and then describe the
3721 range with a lower and upper bound. If one of the bounds is omitted,
3722 it represents either the highest or lowest value of the type.
3723
3724 In the comments below, we represent a range by two numbers in brackets
3725 preceded by a "+" to designate being inside that range, or a "-" to
3726 designate being outside that range, so the condition can be inverted by
3727 flipping the prefix. An omitted bound is represented by a "-". For
3728 example, "- [-, 10]" means being outside the range starting at the lowest
3729 possible value and ending at 10, in other words, being greater than 10.
3730 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3731 always false.
3732
3733 We set up things so that the missing bounds are handled in a consistent
3734 manner so neither a missing bound nor "true" and "false" need to be
3735 handled using a special case. */
3736
3737 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3738 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3739 and UPPER1_P are nonzero if the respective argument is an upper bound
3740 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3741 must be specified for a comparison. ARG1 will be converted to ARG0's
3742 type if both are specified. */
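/* A sketch of the unbounded case: with ARG0 omitted as a lower bound
   (UPPER0_P zero) and ARG1 omitted as an upper bound (UPPER1_P nonzero),
   LT_EXPR compares SGN0 = -1 with SGN1 = 1 and yields true: minus
   infinity is less than plus infinity.  */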
3743
3744 static tree
3745 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3746 tree arg1, int upper1_p)
3747 {
3748 tree tem;
3749 int result;
3750 int sgn0, sgn1;
3751
3752 /* If neither arg represents infinity, do the normal operation.
3753 Else, if not a comparison, return infinity. Else handle the special
3754 comparison rules. Note that most of the cases below won't occur, but
3755 are handled for consistency. */
3756
3757 if (arg0 != 0 && arg1 != 0)
3758 {
3759 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3760 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3761 STRIP_NOPS (tem);
3762 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3763 }
3764
3765 if (TREE_CODE_CLASS (code) != tcc_comparison)
3766 return 0;
3767
3768 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3769 for neither. In real mathematics we could not compare open-ended
3770 ranges, but this is computer arithmetic, where numbers are finite.
3771 We can therefore treat each missing bound as a value Z greater in
3772 magnitude than any representable number, which lets us compare
3773 unbounded ranges consistently. */
3774 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3775 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3776 switch (code)
3777 {
3778 case EQ_EXPR:
3779 result = sgn0 == sgn1;
3780 break;
3781 case NE_EXPR:
3782 result = sgn0 != sgn1;
3783 break;
3784 case LT_EXPR:
3785 result = sgn0 < sgn1;
3786 break;
3787 case LE_EXPR:
3788 result = sgn0 <= sgn1;
3789 break;
3790 case GT_EXPR:
3791 result = sgn0 > sgn1;
3792 break;
3793 case GE_EXPR:
3794 result = sgn0 >= sgn1;
3795 break;
3796 default:
3797 gcc_unreachable ();
3798 }
3799
3800 return constant_boolean_node (result, type);
3801 }
3802 \f
3803 /* Helper routine for make_range. Perform one step for it, return
3804 new expression if the loop should continue or NULL_TREE if it should
3805 stop. */
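/* A worked example, assuming signed X with undefined overflow: if the
   current range is + [10, 20] and this step sees PLUS_EXPR <X, 3>, the
   constant is moved to the bounds, giving + [7, 17] on X, and
   *STRICT_OVERFLOW_P is set because the rewrite relies on signed
   overflow being undefined.  */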
3806
3807 tree
3808 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3809 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3810 bool *strict_overflow_p)
3811 {
3812 tree arg0_type = TREE_TYPE (arg0);
3813 tree n_low, n_high, low = *p_low, high = *p_high;
3814 int in_p = *p_in_p, n_in_p;
3815
3816 switch (code)
3817 {
3818 case TRUTH_NOT_EXPR:
3819 *p_in_p = ! in_p;
3820 return arg0;
3821
3822 case EQ_EXPR: case NE_EXPR:
3823 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3824 /* We can only do something if the range is testing for zero
3825 and if the second operand is an integer constant. Note that
3826 saying something is "in" the range we make is done by
3827 complementing IN_P, since it starts out set for the initial case of
3828 being not equal to zero; "out" is leaving it alone. */
3829 if (low == NULL_TREE || high == NULL_TREE
3830 || ! integer_zerop (low) || ! integer_zerop (high)
3831 || TREE_CODE (arg1) != INTEGER_CST)
3832 return NULL_TREE;
3833
3834 switch (code)
3835 {
3836 case NE_EXPR: /* - [c, c] */
3837 low = high = arg1;
3838 break;
3839 case EQ_EXPR: /* + [c, c] */
3840 in_p = ! in_p, low = high = arg1;
3841 break;
3842 case GT_EXPR: /* - [-, c] */
3843 low = 0, high = arg1;
3844 break;
3845 case GE_EXPR: /* + [c, -] */
3846 in_p = ! in_p, low = arg1, high = 0;
3847 break;
3848 case LT_EXPR: /* - [c, -] */
3849 low = arg1, high = 0;
3850 break;
3851 case LE_EXPR: /* + [-, c] */
3852 in_p = ! in_p, low = 0, high = arg1;
3853 break;
3854 default:
3855 gcc_unreachable ();
3856 }
3857
3858 /* If this is an unsigned comparison, we also know that EXP is
3859 greater than or equal to zero. We base the range tests we make
3860 on that fact, so we record it here so we can parse existing
3861 range tests. We test arg0_type since often the return type
3862 of, e.g. EQ_EXPR, is boolean. */
3863 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3864 {
3865 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3866 in_p, low, high, 1,
3867 build_int_cst (arg0_type, 0),
3868 NULL_TREE))
3869 return NULL_TREE;
3870
3871 in_p = n_in_p, low = n_low, high = n_high;
3872
3873 /* If the high bound is missing, but we have a nonzero low
3874 bound, reverse the range so it goes from zero to the low bound
3875 minus 1. */
3876 if (high == 0 && low && ! integer_zerop (low))
3877 {
3878 in_p = ! in_p;
3879 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3880 integer_one_node, 0);
3881 low = build_int_cst (arg0_type, 0);
3882 }
3883 }
3884
3885 *p_low = low;
3886 *p_high = high;
3887 *p_in_p = in_p;
3888 return arg0;
3889
3890 case NEGATE_EXPR:
3891 /* (-x) IN [a,b] -> x in [-b, -a] */
3892 n_low = range_binop (MINUS_EXPR, exp_type,
3893 build_int_cst (exp_type, 0),
3894 0, high, 1);
3895 n_high = range_binop (MINUS_EXPR, exp_type,
3896 build_int_cst (exp_type, 0),
3897 0, low, 0);
3898 if (n_high != 0 && TREE_OVERFLOW (n_high))
3899 return NULL_TREE;
3900 goto normalize;
3901
3902 case BIT_NOT_EXPR:
3903 /* ~ X -> -X - 1 */
3904 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3905 build_int_cst (exp_type, 1));
3906
3907 case PLUS_EXPR:
3908 case MINUS_EXPR:
3909 if (TREE_CODE (arg1) != INTEGER_CST)
3910 return NULL_TREE;
3911
3912 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3913 move a constant to the other side. */
3914 if (!TYPE_UNSIGNED (arg0_type)
3915 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3916 return NULL_TREE;
3917
3918 /* If EXP is signed, any overflow in the computation is undefined,
3919 so we don't worry about it so long as our computations on
3920 the bounds don't overflow. For unsigned, overflow is defined
3921 and this is exactly the right thing. */
3922 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3923 arg0_type, low, 0, arg1, 0);
3924 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3925 arg0_type, high, 1, arg1, 0);
3926 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3927 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3928 return NULL_TREE;
3929
3930 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3931 *strict_overflow_p = true;
3932
3933 normalize:
3934 /* Check for an unsigned range which has wrapped around the maximum
3935 value thus making n_high < n_low, and normalize it. */
3936 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3937 {
3938 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3939 integer_one_node, 0);
3940 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3941 integer_one_node, 0);
3942
3943 /* If the range is of the form +/- [ x+1, x ], we won't
3944 be able to normalize it. But then, it represents the
3945 whole range or the empty set, so make it
3946 +/- [ -, - ]. */
3947 if (tree_int_cst_equal (n_low, low)
3948 && tree_int_cst_equal (n_high, high))
3949 low = high = 0;
3950 else
3951 in_p = ! in_p;
3952 }
3953 else
3954 low = n_low, high = n_high;
3955
3956 *p_low = low;
3957 *p_high = high;
3958 *p_in_p = in_p;
3959 return arg0;
3960
3961 CASE_CONVERT:
3962 case NON_LVALUE_EXPR:
3963 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3964 return NULL_TREE;
3965
3966 if (! INTEGRAL_TYPE_P (arg0_type)
3967 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3968 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3969 return NULL_TREE;
3970
3971 n_low = low, n_high = high;
3972
3973 if (n_low != 0)
3974 n_low = fold_convert_loc (loc, arg0_type, n_low);
3975
3976 if (n_high != 0)
3977 n_high = fold_convert_loc (loc, arg0_type, n_high);
3978
3979 /* If we're converting arg0 from an unsigned type to exp's
3980 signed type, we will be doing the comparison as unsigned.
3981 The tests above have already verified that LOW and HIGH
3982 are both positive.
3983
3984 So we have to ensure that we will handle large unsigned
3985 values the same way that the current signed bounds treat
3986 negative values. */
3987
3988 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3989 {
3990 tree high_positive;
3991 tree equiv_type;
3992 /* For fixed-point modes, we need to pass the saturating flag
3993 as the 2nd parameter. */
3994 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
3995 equiv_type
3996 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
3997 TYPE_SATURATING (arg0_type));
3998 else
3999 equiv_type
4000 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4001
4002 /* A range without an upper bound is, naturally, unbounded.
4003 Since convert would have cropped a very large value, use
4004 the max value for the destination type. */
4005 high_positive
4006 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4007 : TYPE_MAX_VALUE (arg0_type);
4008
4009 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4010 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4011 fold_convert_loc (loc, arg0_type,
4012 high_positive),
4013 build_int_cst (arg0_type, 1));
4014
4015 /* If the low bound is specified, "and" the range with the
4016 range for which the original unsigned value will be
4017 positive. */
4018 if (low != 0)
4019 {
4020 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4021 1, fold_convert_loc (loc, arg0_type,
4022 integer_zero_node),
4023 high_positive))
4024 return NULL_TREE;
4025
4026 in_p = (n_in_p == in_p);
4027 }
4028 else
4029 {
4030 /* Otherwise, "or" the range with the range of the input
4031 that will be interpreted as negative. */
4032 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4033 1, fold_convert_loc (loc, arg0_type,
4034 integer_zero_node),
4035 high_positive))
4036 return NULL_TREE;
4037
4038 in_p = (in_p != n_in_p);
4039 }
4040 }
4041
4042 *p_low = n_low;
4043 *p_high = n_high;
4044 *p_in_p = in_p;
4045 return arg0;
4046
4047 default:
4048 return NULL_TREE;
4049 }
4050 }
4051
4052 /* Given EXP, a logical expression, set the range it is testing into
4053 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4054 actually being tested. *PLOW and *PHIGH will be made of the same
4055 type as the returned expression. If EXP is not a comparison, we
4056 will most likely not be returning a useful value and range. Set
4057 *STRICT_OVERFLOW_P to true if the return value is only valid
4058 because signed overflow is undefined; otherwise, do not change
4059 *STRICT_OVERFLOW_P. */
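/* For instance, for signed X the expression (X + 2) <= 5 is decoded in
   two steps: the comparison gives + [-, 5] on X + 2, and the PLUS_EXPR
   step then adjusts the bound, so X is returned with the range
   + [-, 3], i.e. X <= 3 (setting *STRICT_OVERFLOW_P).  */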
4060
4061 tree
4062 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4063 bool *strict_overflow_p)
4064 {
4065 enum tree_code code;
4066 tree arg0, arg1 = NULL_TREE;
4067 tree exp_type, nexp;
4068 int in_p;
4069 tree low, high;
4070 location_t loc = EXPR_LOCATION (exp);
4071
4072 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4073 and see if we can refine the range. Some of the cases below may not
4074 happen, but it doesn't seem worth worrying about this. Each call to
4075 make_range_step below either refines the range and returns the next
4076 (sub)expression to examine, or returns NULL_TREE, at which point we stop. */
4077
4078 in_p = 0;
4079 low = high = build_int_cst (TREE_TYPE (exp), 0);
4080
4081 while (1)
4082 {
4083 code = TREE_CODE (exp);
4084 exp_type = TREE_TYPE (exp);
4085 arg0 = NULL_TREE;
4086
4087 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4088 {
4089 if (TREE_OPERAND_LENGTH (exp) > 0)
4090 arg0 = TREE_OPERAND (exp, 0);
4091 if (TREE_CODE_CLASS (code) == tcc_binary
4092 || TREE_CODE_CLASS (code) == tcc_comparison
4093 || (TREE_CODE_CLASS (code) == tcc_expression
4094 && TREE_OPERAND_LENGTH (exp) > 1))
4095 arg1 = TREE_OPERAND (exp, 1);
4096 }
4097 if (arg0 == NULL_TREE)
4098 break;
4099
4100 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4101 &high, &in_p, strict_overflow_p);
4102 if (nexp == NULL_TREE)
4103 break;
4104 exp = nexp;
4105 }
4106
4107 /* If EXP is a constant, we can evaluate whether this is true or false. */
4108 if (TREE_CODE (exp) == INTEGER_CST)
4109 {
4110 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4111 exp, 0, low, 0))
4112 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4113 exp, 1, high, 1)));
4114 low = high = 0;
4115 exp = 0;
4116 }
4117
4118 *pin_p = in_p, *plow = low, *phigh = high;
4119 return exp;
4120 }
4121 \f
4122 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4123 type, TYPE, return an expression to test if EXP is in (or out of, depending
4124 on IN_P) the range. Return 0 if the test couldn't be created. */
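/* For example, with unsigned X and the hypothetical bounds LOW = 2 and
   HIGH = 5 (IN_P nonzero), the wrap-around case below applies and the
   result is the single comparison X - 2 <= 3.  */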
4125
4126 tree
4127 build_range_check (location_t loc, tree type, tree exp, int in_p,
4128 tree low, tree high)
4129 {
4130 tree etype = TREE_TYPE (exp), value;
4131
4132 #ifdef HAVE_canonicalize_funcptr_for_compare
4133 /* Disable this optimization for function pointer expressions
4134 on targets that require function pointer canonicalization. */
4135 if (HAVE_canonicalize_funcptr_for_compare
4136 && TREE_CODE (etype) == POINTER_TYPE
4137 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4138 return NULL_TREE;
4139 #endif
4140
4141 if (! in_p)
4142 {
4143 value = build_range_check (loc, type, exp, 1, low, high);
4144 if (value != 0)
4145 return invert_truthvalue_loc (loc, value);
4146
4147 return 0;
4148 }
4149
4150 if (low == 0 && high == 0)
4151 return build_int_cst (type, 1);
4152
4153 if (low == 0)
4154 return fold_build2_loc (loc, LE_EXPR, type, exp,
4155 fold_convert_loc (loc, etype, high));
4156
4157 if (high == 0)
4158 return fold_build2_loc (loc, GE_EXPR, type, exp,
4159 fold_convert_loc (loc, etype, low));
4160
4161 if (operand_equal_p (low, high, 0))
4162 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4163 fold_convert_loc (loc, etype, low));
4164
4165 if (integer_zerop (low))
4166 {
4167 if (! TYPE_UNSIGNED (etype))
4168 {
4169 etype = unsigned_type_for (etype);
4170 high = fold_convert_loc (loc, etype, high);
4171 exp = fold_convert_loc (loc, etype, exp);
4172 }
4173 return build_range_check (loc, type, exp, 1, 0, high);
4174 }
4175
4176 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4177 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4178 {
4179 unsigned HOST_WIDE_INT lo;
4180 HOST_WIDE_INT hi;
4181 int prec;
4182
4183 prec = TYPE_PRECISION (etype);
4184 if (prec <= HOST_BITS_PER_WIDE_INT)
4185 {
4186 hi = 0;
4187 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4188 }
4189 else
4190 {
4191 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4192 lo = (unsigned HOST_WIDE_INT) -1;
4193 }
4194
4195 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4196 {
4197 if (TYPE_UNSIGNED (etype))
4198 {
4199 tree signed_etype = signed_type_for (etype);
4200 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4201 etype
4202 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4203 else
4204 etype = signed_etype;
4205 exp = fold_convert_loc (loc, etype, exp);
4206 }
4207 return fold_build2_loc (loc, GT_EXPR, type, exp,
4208 build_int_cst (etype, 0));
4209 }
4210 }
4211
4212 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4213 This requires wrap-around arithmetic for the type of the expression.
4214 First make sure that arithmetic in this type is valid, then make sure
4215 that it wraps around. */
4216 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4217 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4218 TYPE_UNSIGNED (etype));
4219
4220 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4221 {
4222 tree utype, minv, maxv;
4223
4224 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4225 for the type in question, as we rely on this here. */
4226 utype = unsigned_type_for (etype);
4227 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4228 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4229 integer_one_node, 1);
4230 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4231
4232 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4233 minv, 1, maxv, 1)))
4234 etype = utype;
4235 else
4236 return 0;
4237 }
4238
4239 high = fold_convert_loc (loc, etype, high);
4240 low = fold_convert_loc (loc, etype, low);
4241 exp = fold_convert_loc (loc, etype, exp);
4242
4243 value = const_binop (MINUS_EXPR, high, low);
4244
4245
4246 if (POINTER_TYPE_P (etype))
4247 {
4248 if (value != 0 && !TREE_OVERFLOW (value))
4249 {
4250 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4251 return build_range_check (loc, type,
4252 fold_build_pointer_plus_loc (loc, exp, low),
4253 1, build_int_cst (etype, 0), value);
4254 }
4255 return 0;
4256 }
4257
4258 if (value != 0 && !TREE_OVERFLOW (value))
4259 return build_range_check (loc, type,
4260 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4261 1, build_int_cst (etype, 0), value);
4262
4263 return 0;
4264 }
4265 \f
4266 /* Return the predecessor of VAL in its type, handling the infinite case. */
4267
4268 static tree
4269 range_predecessor (tree val)
4270 {
4271 tree type = TREE_TYPE (val);
4272
4273 if (INTEGRAL_TYPE_P (type)
4274 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4275 return 0;
4276 else
4277 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4278 }
4279
4280 /* Return the successor of VAL in its type, handling the infinite case. */
4281
4282 static tree
4283 range_successor (tree val)
4284 {
4285 tree type = TREE_TYPE (val);
4286
4287 if (INTEGRAL_TYPE_P (type)
4288 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4289 return 0;
4290 else
4291 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4292 }
4293
4294 /* Given two ranges, see if we can merge them into one. Return 1 if we
4295 can, 0 if we can't. Set the output range into the specified parameters. */
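/* For example, merging the inclusive ranges + [2, 5] and + [4, 8] (both
   IN_P nonzero): they overlap and neither is a subset of the other, so
   the result runs from the start of the second range to the end of the
   first, + [4, 5].  */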
4296
4297 bool
4298 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4299 tree high0, int in1_p, tree low1, tree high1)
4300 {
4301 int no_overlap;
4302 int subset;
4303 int temp;
4304 tree tem;
4305 int in_p;
4306 tree low, high;
4307 int lowequal = ((low0 == 0 && low1 == 0)
4308 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4309 low0, 0, low1, 0)));
4310 int highequal = ((high0 == 0 && high1 == 0)
4311 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4312 high0, 1, high1, 1)));
4313
4314 /* Make range 0 be the range that starts first, or ends last if they
4315 start at the same value. Swap them if it isn't. */
4316 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4317 low0, 0, low1, 0))
4318 || (lowequal
4319 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4320 high1, 1, high0, 1))))
4321 {
4322 temp = in0_p, in0_p = in1_p, in1_p = temp;
4323 tem = low0, low0 = low1, low1 = tem;
4324 tem = high0, high0 = high1, high1 = tem;
4325 }
4326
4327 /* Now flag two cases, whether the ranges are disjoint or whether the
4328 second range is totally subsumed in the first. Note that the tests
4329 below are simplified by the ones above. */
4330 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4331 high0, 1, low1, 0));
4332 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4333 high1, 1, high0, 1));
4334
4335 /* We now have four cases, depending on whether we are including or
4336 excluding the two ranges. */
4337 if (in0_p && in1_p)
4338 {
4339 /* If they don't overlap, the result is false. If the second range
4340 is a subset it is the result. Otherwise, the range is from the start
4341 of the second to the end of the first. */
4342 if (no_overlap)
4343 in_p = 0, low = high = 0;
4344 else if (subset)
4345 in_p = 1, low = low1, high = high1;
4346 else
4347 in_p = 1, low = low1, high = high0;
4348 }
4349
4350 else if (in0_p && ! in1_p)
4351 {
4352 /* If they don't overlap, the result is the first range. If they are
4353 equal, the result is false. If the second range is a subset of the
4354 first, and the ranges begin at the same place, we go from just after
4355 the end of the second range to the end of the first. If the second
4356 range is not a subset of the first, or if it is a subset and both
4357 ranges end at the same place, the range starts at the start of the
4358 first range and ends just before the second range.
4359 Otherwise, we can't describe this as a single range. */
4360 if (no_overlap)
4361 in_p = 1, low = low0, high = high0;
4362 else if (lowequal && highequal)
4363 in_p = 0, low = high = 0;
4364 else if (subset && lowequal)
4365 {
4366 low = range_successor (high1);
4367 high = high0;
4368 in_p = 1;
4369 if (low == 0)
4370 {
4371 /* We are in the weird situation where high0 > high1 but
4372 high1 has no successor. Punt. */
4373 return 0;
4374 }
4375 }
4376 else if (! subset || highequal)
4377 {
4378 low = low0;
4379 high = range_predecessor (low1);
4380 in_p = 1;
4381 if (high == 0)
4382 {
4383 /* low0 < low1 but low1 has no predecessor. Punt. */
4384 return 0;
4385 }
4386 }
4387 else
4388 return 0;
4389 }
4390
4391 else if (! in0_p && in1_p)
4392 {
4393 /* If they don't overlap, the result is the second range. If the second
4394 is a subset of the first, the result is false. Otherwise,
4395 the range starts just after the first range and ends at the
4396 end of the second. */
4397 if (no_overlap)
4398 in_p = 1, low = low1, high = high1;
4399 else if (subset || highequal)
4400 in_p = 0, low = high = 0;
4401 else
4402 {
4403 low = range_successor (high0);
4404 high = high1;
4405 in_p = 1;
4406 if (low == 0)
4407 {
4408 /* high1 > high0 but high0 has no successor. Punt. */
4409 return 0;
4410 }
4411 }
4412 }
4413
4414 else
4415 {
4416 /* The case where we are excluding both ranges. Here the complex case
4417 is if they don't overlap. In that case, the only time we have a
4418 range is if they are adjacent. If the second is a subset of the
4419 first, the result is the first. Otherwise, the range to exclude
4420 starts at the beginning of the first range and ends at the end of the
4421 second. */
4422 if (no_overlap)
4423 {
4424 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4425 range_successor (high0),
4426 1, low1, 0)))
4427 in_p = 0, low = low0, high = high1;
4428 else
4429 {
4430 /* Canonicalize - [min, x] into - [-, x]. */
4431 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4432 switch (TREE_CODE (TREE_TYPE (low0)))
4433 {
4434 case ENUMERAL_TYPE:
4435 if (TYPE_PRECISION (TREE_TYPE (low0))
4436 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4437 break;
4438 /* FALLTHROUGH */
4439 case INTEGER_TYPE:
4440 if (tree_int_cst_equal (low0,
4441 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4442 low0 = 0;
4443 break;
4444 case POINTER_TYPE:
4445 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4446 && integer_zerop (low0))
4447 low0 = 0;
4448 break;
4449 default:
4450 break;
4451 }
4452
4453 /* Canonicalize - [x, max] into - [x, -]. */
4454 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4455 switch (TREE_CODE (TREE_TYPE (high1)))
4456 {
4457 case ENUMERAL_TYPE:
4458 if (TYPE_PRECISION (TREE_TYPE (high1))
4459 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4460 break;
4461 /* FALLTHROUGH */
4462 case INTEGER_TYPE:
4463 if (tree_int_cst_equal (high1,
4464 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4465 high1 = 0;
4466 break;
4467 case POINTER_TYPE:
4468 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4469 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4470 high1, 1,
4471 integer_one_node, 1)))
4472 high1 = 0;
4473 break;
4474 default:
4475 break;
4476 }
4477
4478 /* The ranges might also be adjacent between the maximum and
4479 minimum values of the given type. For
4480 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4481 return + [x + 1, y - 1]. */
4482 if (low0 == 0 && high1 == 0)
4483 {
4484 low = range_successor (high0);
4485 high = range_predecessor (low1);
4486 if (low == 0 || high == 0)
4487 return 0;
4488
4489 in_p = 1;
4490 }
4491 else
4492 return 0;
4493 }
4494 }
4495 else if (subset)
4496 in_p = 0, low = low0, high = high0;
4497 else
4498 in_p = 0, low = low0, high = high1;
4499 }
4500
4501 *pin_p = in_p, *plow = low, *phigh = high;
4502 return 1;
4503 }
4504 \f
4505
4506 /* Subroutine of fold, looking inside expressions of the form
4507 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4508 of the COND_EXPR. This function is being used also to optimize
4509 A op B ? C : A, by reversing the comparison first.
4510
4511 Return a folded expression whose code is not a COND_EXPR
4512 anymore, or NULL_TREE if no folding opportunity is found. */
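/* For example, X >= 0 ? X : -X becomes ABS_EXPR <X> for signed X when
   signed zeros need not be honored; see the case analysis below.  */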
4513
4514 static tree
4515 fold_cond_expr_with_comparison (location_t loc, tree type,
4516 tree arg0, tree arg1, tree arg2)
4517 {
4518 enum tree_code comp_code = TREE_CODE (arg0);
4519 tree arg00 = TREE_OPERAND (arg0, 0);
4520 tree arg01 = TREE_OPERAND (arg0, 1);
4521 tree arg1_type = TREE_TYPE (arg1);
4522 tree tem;
4523
4524 STRIP_NOPS (arg1);
4525 STRIP_NOPS (arg2);
4526
4527 /* If we have A op 0 ? A : -A, consider applying the following
4528 transformations:
4529
4530 A == 0? A : -A same as -A
4531 A != 0? A : -A same as A
4532 A >= 0? A : -A same as abs (A)
4533 A > 0? A : -A same as abs (A)
4534 A <= 0? A : -A same as -abs (A)
4535 A < 0? A : -A same as -abs (A)
4536
4537 None of these transformations work for modes with signed
4538 zeros. If A is +/-0, the first two transformations will
4539 change the sign of the result (from +0 to -0, or vice
4540 versa). The last four will fix the sign of the result,
4541 even though the original expressions could be positive or
4542 negative, depending on the sign of A.
4543
4544 Note that all these transformations are correct if A is
4545 NaN, since the two alternatives (A and -A) are also NaNs. */
4546 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4547 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4548 ? real_zerop (arg01)
4549 : integer_zerop (arg01))
4550 && ((TREE_CODE (arg2) == NEGATE_EXPR
4551 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4552 /* In the case that A is of the form X-Y, '-A' (arg2) may
4553 have already been folded to Y-X, check for that. */
4554 || (TREE_CODE (arg1) == MINUS_EXPR
4555 && TREE_CODE (arg2) == MINUS_EXPR
4556 && operand_equal_p (TREE_OPERAND (arg1, 0),
4557 TREE_OPERAND (arg2, 1), 0)
4558 && operand_equal_p (TREE_OPERAND (arg1, 1),
4559 TREE_OPERAND (arg2, 0), 0))))
4560 switch (comp_code)
4561 {
4562 case EQ_EXPR:
4563 case UNEQ_EXPR:
4564 tem = fold_convert_loc (loc, arg1_type, arg1);
4565 return pedantic_non_lvalue_loc (loc,
4566 fold_convert_loc (loc, type,
4567 negate_expr (tem)));
4568 case NE_EXPR:
4569 case LTGT_EXPR:
4570 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4571 case UNGE_EXPR:
4572 case UNGT_EXPR:
4573 if (flag_trapping_math)
4574 break;
4575 /* Fall through. */
4576 case GE_EXPR:
4577 case GT_EXPR:
4578 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4579 arg1 = fold_convert_loc (loc, signed_type_for
4580 (TREE_TYPE (arg1)), arg1);
4581 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4582 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4583 case UNLE_EXPR:
4584 case UNLT_EXPR:
4585 if (flag_trapping_math)
4586 break;
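/* Fall through. */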
4587 case LE_EXPR:
4588 case LT_EXPR:
4589 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4590 arg1 = fold_convert_loc (loc, signed_type_for
4591 (TREE_TYPE (arg1)), arg1);
4592 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4593 return negate_expr (fold_convert_loc (loc, type, tem));
4594 default:
4595 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4596 break;
4597 }
4598
4599 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4600 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4601 both transformations are correct when A is NaN: A != 0
4602 is then true, and A == 0 is false. */
4603
4604 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4605 && integer_zerop (arg01) && integer_zerop (arg2))
4606 {
4607 if (comp_code == NE_EXPR)
4608 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4609 else if (comp_code == EQ_EXPR)
4610 return build_int_cst (type, 0);
4611 }
4612
4613 /* Try some transformations of A op B ? A : B.
4614
4615 A == B? A : B same as B
4616 A != B? A : B same as A
4617 A >= B? A : B same as max (A, B)
4618 A > B? A : B same as max (B, A)
4619 A <= B? A : B same as min (A, B)
4620 A < B? A : B same as min (B, A)
4621
4622 As above, these transformations don't work in the presence
4623 of signed zeros. For example, if A and B are zeros of
4624 opposite sign, the first two transformations will change
4625 the sign of the result. In the last four, the original
4626 expressions give different results for (A=+0, B=-0) and
4627 (A=-0, B=+0), but the transformed expressions do not.
4628
4629 The first two transformations are correct if either A or B
4630 is a NaN. In the first transformation, the condition will
4631 be false, and B will indeed be chosen. In the case of the
4632 second transformation, the condition A != B will be true,
4633 and A will be chosen.
4634
4635 The conversions to max() and min() are not correct if B is
4636 a number and A is not. The conditions in the original
4637 expressions will be false, so all four give B. The min()
4638 and max() versions would give a NaN instead. */
4639 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4640 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4641 /* Avoid these transformations if the COND_EXPR may be used
4642 as an lvalue in the C++ front-end. PR c++/19199. */
4643 && (in_gimple_form
4644 || (strcmp (lang_hooks.name, "GNU C++") != 0
4645 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4646 || ! maybe_lvalue_p (arg1)
4647 || ! maybe_lvalue_p (arg2)))
4648 {
4649 tree comp_op0 = arg00;
4650 tree comp_op1 = arg01;
4651 tree comp_type = TREE_TYPE (comp_op0);
4652
4653 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4654 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4655 {
4656 comp_type = type;
4657 comp_op0 = arg1;
4658 comp_op1 = arg2;
4659 }
4660
4661 switch (comp_code)
4662 {
4663 case EQ_EXPR:
4664 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4665 case NE_EXPR:
4666 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4667 case LE_EXPR:
4668 case LT_EXPR:
4669 case UNLE_EXPR:
4670 case UNLT_EXPR:
4671 /* In C++ a ?: expression can be an lvalue, so put the
4672 operand which will be used if they are equal first
4673 so that we can convert this back to the
4674 corresponding COND_EXPR. */
4675 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4676 {
4677 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4678 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4679 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4680 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4681 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4682 comp_op1, comp_op0);
4683 return pedantic_non_lvalue_loc (loc,
4684 fold_convert_loc (loc, type, tem));
4685 }
4686 break;
4687 case GE_EXPR:
4688 case GT_EXPR:
4689 case UNGE_EXPR:
4690 case UNGT_EXPR:
4691 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4692 {
4693 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4694 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4695 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4696 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4697 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4698 comp_op1, comp_op0);
4699 return pedantic_non_lvalue_loc (loc,
4700 fold_convert_loc (loc, type, tem));
4701 }
4702 break;
4703 case UNEQ_EXPR:
4704 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4705 return pedantic_non_lvalue_loc (loc,
4706 fold_convert_loc (loc, type, arg2));
4707 break;
4708 case LTGT_EXPR:
4709 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4710 return pedantic_non_lvalue_loc (loc,
4711 fold_convert_loc (loc, type, arg1));
4712 break;
4713 default:
4714 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4715 break;
4716 }
4717 }
4718
4719 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4720 we might still be able to simplify this. For example,
4721 if C1 is one less or one more than C2, this might have started
4722 out as a MIN or MAX and been transformed by this function.
4723 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4724
4725 if (INTEGRAL_TYPE_P (type)
4726 && TREE_CODE (arg01) == INTEGER_CST
4727 && TREE_CODE (arg2) == INTEGER_CST)
4728 switch (comp_code)
4729 {
4730 case EQ_EXPR:
4731 if (TREE_CODE (arg1) == INTEGER_CST)
4732 break;
4733 /* We can replace A with C1 in this case. */
4734 arg1 = fold_convert_loc (loc, type, arg01);
4735 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4736
4737 case LT_EXPR:
4738 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4739 MIN_EXPR, to preserve the signedness of the comparison. */
4740 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4741 OEP_ONLY_CONST)
4742 && operand_equal_p (arg01,
4743 const_binop (PLUS_EXPR, arg2,
4744 build_int_cst (type, 1)),
4745 OEP_ONLY_CONST))
4746 {
4747 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4748 fold_convert_loc (loc, TREE_TYPE (arg00),
4749 arg2));
4750 return pedantic_non_lvalue_loc (loc,
4751 fold_convert_loc (loc, type, tem));
4752 }
4753 break;
4754
4755 case LE_EXPR:
4756 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4757 as above. */
4758 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4759 OEP_ONLY_CONST)
4760 && operand_equal_p (arg01,
4761 const_binop (MINUS_EXPR, arg2,
4762 build_int_cst (type, 1)),
4763 OEP_ONLY_CONST))
4764 {
4765 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4766 fold_convert_loc (loc, TREE_TYPE (arg00),
4767 arg2));
4768 return pedantic_non_lvalue_loc (loc,
4769 fold_convert_loc (loc, type, tem));
4770 }
4771 break;
4772
4773 case GT_EXPR:
4774 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4775 MAX_EXPR, to preserve the signedness of the comparison. */
4776 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4777 OEP_ONLY_CONST)
4778 && operand_equal_p (arg01,
4779 const_binop (MINUS_EXPR, arg2,
4780 build_int_cst (type, 1)),
4781 OEP_ONLY_CONST))
4782 {
4783 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4784 fold_convert_loc (loc, TREE_TYPE (arg00),
4785 arg2));
4786 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4787 }
4788 break;
4789
4790 case GE_EXPR:
4791 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4792 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4793 OEP_ONLY_CONST)
4794 && operand_equal_p (arg01,
4795 const_binop (PLUS_EXPR, arg2,
4796 build_int_cst (type, 1)),
4797 OEP_ONLY_CONST))
4798 {
4799 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4800 fold_convert_loc (loc, TREE_TYPE (arg00),
4801 arg2));
4802 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4803 }
4804 break;
4805 case NE_EXPR:
4806 break;
4807 default:
4808 gcc_unreachable ();
4809 }
4810
4811 return NULL_TREE;
4812 }
4813
4814
4815 \f
4816 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4817 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4818 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4819 false) >= 2)
4820 #endif
4821
4822 /* EXP is some logical combination of boolean tests. See if we can
4823 merge it into some range test. Return the new tree if so. */
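/* For example, for X == 2 || X == 3 each side is described as a range
   and inverted (this is an OR), giving - [2, 2] and - [3, 3]; these are
   adjacent, so merge_ranges combines them into - [2, 3], and the final
   inversion below leaves a check equivalent to (unsigned) (X - 2) <= 1.  */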
4824
4825 static tree
4826 fold_range_test (location_t loc, enum tree_code code, tree type,
4827 tree op0, tree op1)
4828 {
4829 int or_op = (code == TRUTH_ORIF_EXPR
4830 || code == TRUTH_OR_EXPR);
4831 int in0_p, in1_p, in_p;
4832 tree low0, low1, low, high0, high1, high;
4833 bool strict_overflow_p = false;
4834 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4835 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4836 tree tem;
4837 const char * const warnmsg = G_("assuming signed overflow does not occur "
4838 "when simplifying range test");
4839
4840 /* If this is an OR operation, invert both sides; we will invert
4841 again at the end. */
4842 if (or_op)
4843 in0_p = ! in0_p, in1_p = ! in1_p;
4844
4845 /* If both expressions are the same, if we can merge the ranges, and we
4846 can build the range test, return it or its inversion. If one of the
4847 ranges is always true or always false, consider it to be the same
4848 expression as the other. */
4849 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4850 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4851 in1_p, low1, high1)
4852 && 0 != (tem = (build_range_check (loc, type,
4853 lhs != 0 ? lhs
4854 : rhs != 0 ? rhs : integer_zero_node,
4855 in_p, low, high))))
4856 {
4857 if (strict_overflow_p)
4858 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4859 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4860 }
4861
4862 /* On machines where branches are expensive, if this is a
4863 short-circuited branch and the underlying object on both sides
4864 is the same, make a non-short-circuit operation. */
4865 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4866 && lhs != 0 && rhs != 0
4867 && (code == TRUTH_ANDIF_EXPR
4868 || code == TRUTH_ORIF_EXPR)
4869 && operand_equal_p (lhs, rhs, 0))
4870 {
4871 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4872 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4873 which cases we can't do this. */
4874 if (simple_operand_p (lhs))
4875 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4876 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4877 type, op0, op1);
4878
4879 else if (!lang_hooks.decls.global_bindings_p ()
4880 && !CONTAINS_PLACEHOLDER_P (lhs))
4881 {
4882 tree common = save_expr (lhs);
4883
4884 if (0 != (lhs = build_range_check (loc, type, common,
4885 or_op ? ! in0_p : in0_p,
4886 low0, high0))
4887 && (0 != (rhs = build_range_check (loc, type, common,
4888 or_op ? ! in1_p : in1_p,
4889 low1, high1))))
4890 {
4891 if (strict_overflow_p)
4892 fold_overflow_warning (warnmsg,
4893 WARN_STRICT_OVERFLOW_COMPARISON);
4894 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4895 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4896 type, lhs, rhs);
4897 }
4898 }
4899 }
4900
4901 return 0;
4902 }
4903 \f
4904 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4905 bit value. Arrange things so the extra bits will be set to zero if and
4906 only if C is sign-extended to its full width. If MASK is nonzero,
4907 it is an INTEGER_CST that should be AND'ed with the extra bits. */
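/* A worked example with hypothetical widths: for C = 4, P = 3, UNSIGNEDP
   zero and an 8-bit mode, the sign bit of the 3-bit value 100 is set, so
   the shifts below produce TEMP = 11111000 and C XOR TEMP = 11111100,
   i.e. the field value sign-extended to -4.  */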
4908
4909 static tree
4910 unextend (tree c, int p, int unsignedp, tree mask)
4911 {
4912 tree type = TREE_TYPE (c);
4913 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4914 tree temp;
4915
4916 if (p == modesize || unsignedp)
4917 return c;
4918
4919 /* We work by getting just the sign bit into the low-order bit, then
4920 into the high-order bit, then sign-extend. We then XOR that value
4921 with C. */
4922 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4923 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4924
4925 /* We must use a signed type in order to get an arithmetic right shift.
4926 However, we must also avoid introducing accidental overflows, so that
4927 a subsequent call to integer_zerop will work. Hence we must
4928 do the type conversion here. At this point, the constant is either
4929 zero or one, and the conversion to a signed type can never overflow.
4930 We could get an overflow if this conversion is done anywhere else. */
4931 if (TYPE_UNSIGNED (type))
4932 temp = fold_convert (signed_type_for (type), temp);
4933
4934 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4935 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4936 if (mask != 0)
4937 temp = const_binop (BIT_AND_EXPR, temp,
4938 fold_convert (TREE_TYPE (c), mask));
4939 /* If necessary, convert the type back to match the type of C. */
4940 if (TYPE_UNSIGNED (type))
4941 temp = fold_convert (type, temp);
4942
4943 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4944 }
4945 \f
4946 /* For an expression that has the form
4947 (A && B) || ~B
4948 or
4949 (A || B) && ~B,
4950 we can drop one of the inner expressions and simplify to
4951 A || ~B
4952 or
4953 A && ~B
4954 LOC is the location of the resulting expression. OP is the inner
4955 logical operation (the left-hand side in the examples above), while CMPOP
4956 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4957 removing a condition that guards another, as in
4958 (A != NULL && A->...) || A == NULL
4959 which we must not transform. If RHS_ONLY is true, only eliminate the
4960 right-most operand of the inner logical operation. */
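/* For instance, with OP = `a != 0 && b > 2' and CMPOP = `a == 0', the
   left arm of OP is the inversion of CMPOP, so it is dropped and
   `b > 2' is returned, turning (a != 0 && b > 2) || a == 0 into
   (b > 2) || a == 0.  */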
4961
4962 static tree
4963 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4964 bool rhs_only)
4965 {
4966 tree type = TREE_TYPE (cmpop);
4967 enum tree_code code = TREE_CODE (cmpop);
4968 enum tree_code truthop_code = TREE_CODE (op);
4969 tree lhs = TREE_OPERAND (op, 0);
4970 tree rhs = TREE_OPERAND (op, 1);
4971 tree orig_lhs = lhs, orig_rhs = rhs;
4972 enum tree_code rhs_code = TREE_CODE (rhs);
4973 enum tree_code lhs_code = TREE_CODE (lhs);
4974 enum tree_code inv_code;
4975
4976 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4977 return NULL_TREE;
4978
4979 if (TREE_CODE_CLASS (code) != tcc_comparison)
4980 return NULL_TREE;
4981
4982 if (rhs_code == truthop_code)
4983 {
4984 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4985 if (newrhs != NULL_TREE)
4986 {
4987 rhs = newrhs;
4988 rhs_code = TREE_CODE (rhs);
4989 }
4990 }
4991 if (lhs_code == truthop_code && !rhs_only)
4992 {
4993 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4994 if (newlhs != NULL_TREE)
4995 {
4996 lhs = newlhs;
4997 lhs_code = TREE_CODE (lhs);
4998 }
4999 }
5000
5001 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5002 if (inv_code == rhs_code
5003 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5004 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5005 return lhs;
5006 if (!rhs_only && inv_code == lhs_code
5007 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5008 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5009 return rhs;
5010 if (rhs != orig_rhs || lhs != orig_lhs)
5011 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5012 lhs, rhs);
5013 return NULL_TREE;
5014 }
5015
5016 /* Find ways of folding logical expressions of LHS and RHS:
5017 Try to merge two comparisons to the same innermost item.
5018 Look for range tests like "ch >= '0' && ch <= '9'".
5019 Look for combinations of simple terms on machines with expensive branches
5020 and evaluate the RHS unconditionally.
5021
5022 For example, if we have p->a == 2 && p->b == 4 and we can make an
5023 object large enough to span both A and B, we can do this with a comparison
5024 against the object ANDed with a mask.
5025
5026 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5027 operations to do this with one comparison.
5028
5029 We check for both normal comparisons and the BIT_AND_EXPRs made by
5030 this function and the one above.
5031
5032 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5033 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5034
5035 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5036 two operands.
5037
5038 We return the simplified tree or 0 if no optimization is possible. */
5039
5040 static tree
5041 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5042 tree lhs, tree rhs)
5043 {
5044 /* If this is the "or" of two comparisons, we can do something if
5045 the comparisons are NE_EXPR. If this is the "and", we can do something
5046 if the comparisons are EQ_EXPR. I.e.,
5047 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5048
5049 WANTED_CODE is the comparison code implied by the operation. For single bit fields, we can
5050 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5051 comparison for one-bit fields. */
5052
5053 enum tree_code wanted_code;
5054 enum tree_code lcode, rcode;
5055 tree ll_arg, lr_arg, rl_arg, rr_arg;
5056 tree ll_inner, lr_inner, rl_inner, rr_inner;
5057 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5058 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5059 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5060 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5061 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5062 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5063 enum machine_mode lnmode, rnmode;
5064 tree ll_mask, lr_mask, rl_mask, rr_mask;
5065 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5066 tree l_const, r_const;
5067 tree lntype, rntype, result;
5068 HOST_WIDE_INT first_bit, end_bit;
5069 int volatilep;
5070
5071 /* Start by getting the comparison codes. Fail if anything is volatile.
5072 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5073 it were surrounded with a NE_EXPR. */
5074
5075 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5076 return 0;
5077
5078 lcode = TREE_CODE (lhs);
5079 rcode = TREE_CODE (rhs);
5080
5081 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5082 {
5083 lhs = build2 (NE_EXPR, truth_type, lhs,
5084 build_int_cst (TREE_TYPE (lhs), 0));
5085 lcode = NE_EXPR;
5086 }
5087
5088 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5089 {
5090 rhs = build2 (NE_EXPR, truth_type, rhs,
5091 build_int_cst (TREE_TYPE (rhs), 0));
5092 rcode = NE_EXPR;
5093 }
5094
5095 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5096 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5097 return 0;
5098
5099 ll_arg = TREE_OPERAND (lhs, 0);
5100 lr_arg = TREE_OPERAND (lhs, 1);
5101 rl_arg = TREE_OPERAND (rhs, 0);
5102 rr_arg = TREE_OPERAND (rhs, 1);
5103
5104 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5105 if (simple_operand_p (ll_arg)
5106 && simple_operand_p (lr_arg))
5107 {
5108 if (operand_equal_p (ll_arg, rl_arg, 0)
5109 && operand_equal_p (lr_arg, rr_arg, 0))
5110 {
5111 result = combine_comparisons (loc, code, lcode, rcode,
5112 truth_type, ll_arg, lr_arg);
5113 if (result)
5114 return result;
5115 }
5116 else if (operand_equal_p (ll_arg, rr_arg, 0)
5117 && operand_equal_p (lr_arg, rl_arg, 0))
5118 {
5119 result = combine_comparisons (loc, code, lcode,
5120 swap_tree_comparison (rcode),
5121 truth_type, ll_arg, lr_arg);
5122 if (result)
5123 return result;
5124 }
5125 }
5126
5127 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5128 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5129
5130 /* If the RHS can be evaluated unconditionally and its operands are
5131 simple, it wins to evaluate the RHS unconditionally on machines
5132 with expensive branches. In this case, this isn't a comparison
5133 that can be merged. */
5134
5135 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5136 false) >= 2
5137 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5138 && simple_operand_p (rl_arg)
5139 && simple_operand_p (rr_arg))
5140 {
5141 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5142 if (code == TRUTH_OR_EXPR
5143 && lcode == NE_EXPR && integer_zerop (lr_arg)
5144 && rcode == NE_EXPR && integer_zerop (rr_arg)
5145 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5146 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5147 return build2_loc (loc, NE_EXPR, truth_type,
5148 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5149 ll_arg, rl_arg),
5150 build_int_cst (TREE_TYPE (ll_arg), 0));
5151
5152 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5153 if (code == TRUTH_AND_EXPR
5154 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5155 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5156 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5157 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5158 return build2_loc (loc, EQ_EXPR, truth_type,
5159 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5160 ll_arg, rl_arg),
5161 build_int_cst (TREE_TYPE (ll_arg), 0));
5162 }
5163
5164 /* See if the comparisons can be merged. Then get all the parameters for
5165 each side. */
5166
5167 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5168 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5169 return 0;
5170
5171 volatilep = 0;
5172 ll_inner = decode_field_reference (loc, ll_arg,
5173 &ll_bitsize, &ll_bitpos, &ll_mode,
5174 &ll_unsignedp, &volatilep, &ll_mask,
5175 &ll_and_mask);
5176 lr_inner = decode_field_reference (loc, lr_arg,
5177 &lr_bitsize, &lr_bitpos, &lr_mode,
5178 &lr_unsignedp, &volatilep, &lr_mask,
5179 &lr_and_mask);
5180 rl_inner = decode_field_reference (loc, rl_arg,
5181 &rl_bitsize, &rl_bitpos, &rl_mode,
5182 &rl_unsignedp, &volatilep, &rl_mask,
5183 &rl_and_mask);
5184 rr_inner = decode_field_reference (loc, rr_arg,
5185 &rr_bitsize, &rr_bitpos, &rr_mode,
5186 &rr_unsignedp, &volatilep, &rr_mask,
5187 &rr_and_mask);
5188
5189 /* The inner operation on the lhs of each comparison must be the
5190 same if we are to be able to do anything.
5191 Then see if we have constants. If not, the same must be true for
5192 the rhs's. */
5193 if (volatilep || ll_inner == 0 || rl_inner == 0
5194 || ! operand_equal_p (ll_inner, rl_inner, 0))
5195 return 0;
5196
5197 if (TREE_CODE (lr_arg) == INTEGER_CST
5198 && TREE_CODE (rr_arg) == INTEGER_CST)
5199 l_const = lr_arg, r_const = rr_arg;
5200 else if (lr_inner == 0 || rr_inner == 0
5201 || ! operand_equal_p (lr_inner, rr_inner, 0))
5202 return 0;
5203 else
5204 l_const = r_const = 0;
5205
5206 /* If either comparison code is not correct for our logical operation,
5207 fail. However, we can convert a one-bit comparison against zero into
5208 the opposite comparison against that bit being set in the field. */
5209
5210 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5211 if (lcode != wanted_code)
5212 {
5213 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5214 {
5215 /* Make the left operand unsigned, since we are only interested
5216 in the value of one bit. Otherwise we are doing the wrong
5217 thing below. */
5218 ll_unsignedp = 1;
5219 l_const = ll_mask;
5220 }
5221 else
5222 return 0;
5223 }
5224
5225 /* This is analogous to the code for l_const above. */
5226 if (rcode != wanted_code)
5227 {
5228 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5229 {
5230 rl_unsignedp = 1;
5231 r_const = rl_mask;
5232 }
5233 else
5234 return 0;
5235 }
5236
5237 /* See if we can find a mode that contains both fields being compared on
5238 the left. If we can't, fail. Otherwise, update all constants and masks
5239 to be relative to a field of that size. */
5240 first_bit = MIN (ll_bitpos, rl_bitpos);
5241 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5242 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5243 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5244 volatilep);
5245 if (lnmode == VOIDmode)
5246 return 0;
5247
5248 lnbitsize = GET_MODE_BITSIZE (lnmode);
5249 lnbitpos = first_bit & ~ (lnbitsize - 1);
5250 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5251 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5252
5253 if (BYTES_BIG_ENDIAN)
5254 {
5255 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5256 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5257 }
5258
5259 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5260 size_int (xll_bitpos));
5261 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5262 size_int (xrl_bitpos));
5263
5264 if (l_const)
5265 {
5266 l_const = fold_convert_loc (loc, lntype, l_const);
5267 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5268 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5269 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5270 fold_build1_loc (loc, BIT_NOT_EXPR,
5271 lntype, ll_mask))))
5272 {
5273 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5274
5275 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5276 }
5277 }
5278 if (r_const)
5279 {
5280 r_const = fold_convert_loc (loc, lntype, r_const);
5281 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5282 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5283 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5284 fold_build1_loc (loc, BIT_NOT_EXPR,
5285 lntype, rl_mask))))
5286 {
5287 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5288
5289 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5290 }
5291 }
5292
5293 /* If the right sides are not constant, do the same for them. Also,
5294 disallow this optimization if a size or signedness mismatch occurs
5295 between the left and right sides. */
5296 if (l_const == 0)
5297 {
5298 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5299 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5300 /* Make sure the two fields on the right
5301 correspond to the left without being swapped. */
5302 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5303 return 0;
5304
5305 first_bit = MIN (lr_bitpos, rr_bitpos);
5306 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5307 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5308 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5309 volatilep);
5310 if (rnmode == VOIDmode)
5311 return 0;
5312
5313 rnbitsize = GET_MODE_BITSIZE (rnmode);
5314 rnbitpos = first_bit & ~ (rnbitsize - 1);
5315 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5316 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5317
5318 if (BYTES_BIG_ENDIAN)
5319 {
5320 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5321 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5322 }
5323
5324 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5325 rntype, lr_mask),
5326 size_int (xlr_bitpos));
5327 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5328 rntype, rr_mask),
5329 size_int (xrr_bitpos));
5330
5331 /* Make a mask that corresponds to both fields being compared.
5332 Do this for both items being compared. If the operands are the
5333 same size and the bits being compared are in the same position
5334 then we can do this by masking both and comparing the masked
5335 results. */
5336 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5337 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5338 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5339 {
5340 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5341 ll_unsignedp || rl_unsignedp);
5342 if (! all_ones_mask_p (ll_mask, lnbitsize))
5343 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5344
5345 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5346 lr_unsignedp || rr_unsignedp);
5347 if (! all_ones_mask_p (lr_mask, rnbitsize))
5348 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5349
5350 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5351 }
5352
5353 /* There is still another way we can do something: If both pairs of
5354 fields being compared are adjacent, we may be able to make a wider
5355 field containing them both.
5356
5357 Note that we still must mask the lhs/rhs expressions. Furthermore,
5358 the mask must be shifted to account for the shift done by
5359 make_bit_field_ref. */
5360 if ((ll_bitsize + ll_bitpos == rl_bitpos
5361 && lr_bitsize + lr_bitpos == rr_bitpos)
5362 || (ll_bitpos == rl_bitpos + rl_bitsize
5363 && lr_bitpos == rr_bitpos + rr_bitsize))
5364 {
5365 tree type;
5366
5367 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5368 ll_bitsize + rl_bitsize,
5369 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5370 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5371 lr_bitsize + rr_bitsize,
5372 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5373
5374 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5375 size_int (MIN (xll_bitpos, xrl_bitpos)));
5376 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5377 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5378
5379 /* Convert to the smaller type before masking out unwanted bits. */
5380 type = lntype;
5381 if (lntype != rntype)
5382 {
5383 if (lnbitsize > rnbitsize)
5384 {
5385 lhs = fold_convert_loc (loc, rntype, lhs);
5386 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5387 type = rntype;
5388 }
5389 else if (lnbitsize < rnbitsize)
5390 {
5391 rhs = fold_convert_loc (loc, lntype, rhs);
5392 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5393 type = lntype;
5394 }
5395 }
5396
5397 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5398 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5399
5400 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5401 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5402
5403 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5404 }
5405
5406 return 0;
5407 }
5408
5409 /* Handle the case of comparisons with constants. If there is something in
5410 common between the masks, those bits of the constants must be the same.
5411 If not, the result is known: always false for `and', always true for
5412 `or'. Test for this to avoid generating incorrect code below. */
5413 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5414 if (! integer_zerop (result)
5415 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5416 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5417 {
5418 if (wanted_code == NE_EXPR)
5419 {
5420 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5421 return constant_boolean_node (true, truth_type);
5422 }
5423 else
5424 {
5425 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5426 return constant_boolean_node (false, truth_type);
5427 }
5428 }
5429
5430 /* Construct the expression we will return. First get the component
5431 reference we will make. Unless the mask is all ones the width of
5432 that field, perform the mask operation. Then compare with the
5433 merged constant. */
5434 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5435 ll_unsignedp || rl_unsignedp);
5436
5437 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5438 if (! all_ones_mask_p (ll_mask, lnbitsize))
5439 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5440
5441 return build2_loc (loc, wanted_code, truth_type, result,
5442 const_binop (BIT_IOR_EXPR, l_const, r_const));
5443 }
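
/* For illustration (hypothetical user code, not part of this file): given

     struct s { unsigned a : 4; unsigned b : 4; };
     int f (struct s x) { return x.a == 3 && x.b == 5; }

   the logic above merges the two bit-field comparisons into a single
   load, mask and compare of the containing storage unit, conceptually
   (*(unsigned char *) &x) == 0x53 on a typical little-endian layout,
   where 0x53 == (5 << 4) | 3; the BIT_AND is dropped because the
   combined mask covers the whole unit. */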
5444 \f
5445 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5446 constant. */
5447
5448 static tree
5449 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5450 tree op0, tree op1)
5451 {
5452 tree arg0 = op0;
5453 enum tree_code op_code;
5454 tree comp_const;
5455 tree minmax_const;
5456 int consts_equal, consts_lt;
5457 tree inner;
5458
5459 STRIP_SIGN_NOPS (arg0);
5460
5461 op_code = TREE_CODE (arg0);
5462 minmax_const = TREE_OPERAND (arg0, 1);
5463 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5464 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5465 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5466 inner = TREE_OPERAND (arg0, 0);
5467
5468 /* If something does not permit us to optimize, return the original tree. */
5469 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5470 || TREE_CODE (comp_const) != INTEGER_CST
5471 || TREE_OVERFLOW (comp_const)
5472 || TREE_CODE (minmax_const) != INTEGER_CST
5473 || TREE_OVERFLOW (minmax_const))
5474 return NULL_TREE;
5475
5476 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5477 and GT_EXPR, doing the rest with recursive calls using logical
5478 simplifications. */
5479 switch (code)
5480 {
5481 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5482 {
5483 tree tem
5484 = optimize_minmax_comparison (loc,
5485 invert_tree_comparison (code, false),
5486 type, op0, op1);
5487 if (tem)
5488 return invert_truthvalue_loc (loc, tem);
5489 return NULL_TREE;
5490 }
5491
5492 case GE_EXPR:
5493 return
5494 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5495 optimize_minmax_comparison
5496 (loc, EQ_EXPR, type, arg0, comp_const),
5497 optimize_minmax_comparison
5498 (loc, GT_EXPR, type, arg0, comp_const));
5499
5500 case EQ_EXPR:
5501 if (op_code == MAX_EXPR && consts_equal)
5502 /* MAX (X, 0) == 0 -> X <= 0 */
5503 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5504
5505 else if (op_code == MAX_EXPR && consts_lt)
5506 /* MAX (X, 0) == 5 -> X == 5 */
5507 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5508
5509 else if (op_code == MAX_EXPR)
5510 /* MAX (X, 0) == -1 -> false */
5511 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5512
5513 else if (consts_equal)
5514 /* MIN (X, 0) == 0 -> X >= 0 */
5515 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5516
5517 else if (consts_lt)
5518 /* MIN (X, 0) == 5 -> false */
5519 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5520
5521 else
5522 /* MIN (X, 0) == -1 -> X == -1 */
5523 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5524
5525 case GT_EXPR:
5526 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5527 /* MAX (X, 0) > 0 -> X > 0
5528 MAX (X, 0) > 5 -> X > 5 */
5529 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5530
5531 else if (op_code == MAX_EXPR)
5532 /* MAX (X, 0) > -1 -> true */
5533 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5534
5535 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5536 /* MIN (X, 0) > 0 -> false
5537 MIN (X, 0) > 5 -> false */
5538 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5539
5540 else
5541 /* MIN (X, 0) > -1 -> X > -1 */
5542 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5543
5544 default:
5545 return NULL_TREE;
5546 }
5547 }
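
/* For illustration (hypothetical values): MAX (x, 4) < 8 is not handled
   directly; the NE/LT/LE case above rewrites it as the inversion of
   MAX (x, 4) >= 8, the GE case expands that into
   MAX (x, 4) == 8 || MAX (x, 4) > 8, and the EQ and GT cases reduce
   those to x == 8 || x > 8, so the whole comparison folds to x < 8. */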
5548 \f
5549 /* T is an integer expression that is being multiplied or divided by,
5550 or taken modulo, a constant C (CODE says which operation and what kind
5551 of division or modulus). See if we can eliminate that operation by folding it with
5552 other operations already in T. WIDE_TYPE, if non-null, is a type that
5553 should be used for the computation if wider than our type.
5554
5555 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5556 (X * 2) + (Y * 4). We must, however, be assured that either the original
5557 expression would not overflow or that overflow is undefined for the type
5558 in the language in question.
5559
5560 If we return a non-null expression, it is an equivalent form of the
5561 original computation, but need not be in the original type.
5562
5563 We set *STRICT_OVERFLOW_P to true if the return value depends on
5564 signed overflow being undefined. Otherwise we do not change
5565 *STRICT_OVERFLOW_P. */
5566
5567 static tree
5568 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5569 bool *strict_overflow_p)
5570 {
5571 /* To avoid exponential search depth, refuse to allow recursion past
5572 three levels. Beyond that (1) it's highly unlikely that we'll find
5573 something interesting and (2) we've probably processed it before
5574 when we built the inner expression. */
5575
5576 static int depth;
5577 tree ret;
5578
5579 if (depth > 3)
5580 return NULL;
5581
5582 depth++;
5583 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5584 depth--;
5585
5586 return ret;
5587 }
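
/* For illustration (hypothetical operands): with T = X * 12, C = 4 and
   CODE = TRUNC_DIV_EXPR on a signed type whose overflow is undefined,
   extract_muldiv_1 below reaches the case where the two operations
   "cancel": since 12 % 4 == 0, the result is X * 3, and
   *STRICT_OVERFLOW_P is set to record that the fold relies on signed
   overflow being undefined. */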
5588
5589 static tree
5590 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5591 bool *strict_overflow_p)
5592 {
5593 tree type = TREE_TYPE (t);
5594 enum tree_code tcode = TREE_CODE (t);
5595 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5596 > GET_MODE_SIZE (TYPE_MODE (type)))
5597 ? wide_type : type);
5598 tree t1, t2;
5599 int same_p = tcode == code;
5600 tree op0 = NULL_TREE, op1 = NULL_TREE;
5601 bool sub_strict_overflow_p;
5602
5603 /* Don't deal with constants of zero here; they confuse the code below. */
5604 if (integer_zerop (c))
5605 return NULL_TREE;
5606
5607 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5608 op0 = TREE_OPERAND (t, 0);
5609
5610 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5611 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5612
5613 /* Note that we need not handle conditional operations here since fold
5614 already handles those cases. So just do arithmetic here. */
5615 switch (tcode)
5616 {
5617 case INTEGER_CST:
5618 /* For a constant, we can always simplify if we are a multiply
5619 or (for divide and modulus) if it is a multiple of our constant. */
5620 if (code == MULT_EXPR
5621 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5622 return const_binop (code, fold_convert (ctype, t),
5623 fold_convert (ctype, c));
5624 break;
5625
5626 CASE_CONVERT: case NON_LVALUE_EXPR:
5627 /* If op0 is an expression ... */
5628 if ((COMPARISON_CLASS_P (op0)
5629 || UNARY_CLASS_P (op0)
5630 || BINARY_CLASS_P (op0)
5631 || VL_EXP_CLASS_P (op0)
5632 || EXPRESSION_CLASS_P (op0))
5633 /* ... and has wrapping overflow, and its type is smaller
5634 than ctype, then we cannot pass through as widening. */
5635 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5636 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5637 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5638 && (TYPE_PRECISION (ctype)
5639 > TYPE_PRECISION (TREE_TYPE (op0))))
5640 /* ... or this is a truncation (t is narrower than op0),
5641 then we cannot pass through this narrowing. */
5642 || (TYPE_PRECISION (type)
5643 < TYPE_PRECISION (TREE_TYPE (op0)))
5644 /* ... or signedness changes for division or modulus,
5645 then we cannot pass through this conversion. */
5646 || (code != MULT_EXPR
5647 && (TYPE_UNSIGNED (ctype)
5648 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5649 /* ... or the inner type has undefined overflow while the type
5650 being converted to does not; we cannot do the operation in the
5651 inner type, as that would introduce undefined overflow. */
5652 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5653 && !TYPE_OVERFLOW_UNDEFINED (type))))
5654 break;
5655
5656 /* Pass the constant down and see if we can make a simplification. If
5657 we can, replace this expression with the inner simplification for
5658 possible later conversion to our or some other type. */
5659 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5660 && TREE_CODE (t2) == INTEGER_CST
5661 && !TREE_OVERFLOW (t2)
5662 && (0 != (t1 = extract_muldiv (op0, t2, code,
5663 code == MULT_EXPR
5664 ? ctype : NULL_TREE,
5665 strict_overflow_p))))
5666 return t1;
5667 break;
5668
5669 case ABS_EXPR:
5670 /* If widening the type changes it from signed to unsigned, then we
5671 must avoid building ABS_EXPR itself as unsigned. */
5672 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5673 {
5674 tree cstype = (*signed_type_for) (ctype);
5675 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5676 != 0)
5677 {
5678 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5679 return fold_convert (ctype, t1);
5680 }
5681 break;
5682 }
5683 /* If the constant is negative, we cannot simplify this. */
5684 if (tree_int_cst_sgn (c) == -1)
5685 break;
5686 /* FALLTHROUGH */
5687 case NEGATE_EXPR:
5688 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5689 != 0)
5690 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5691 break;
5692
5693 case MIN_EXPR: case MAX_EXPR:
5694 /* If widening the type changes the signedness, then we can't perform
5695 this optimization as that changes the result. */
5696 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5697 break;
5698
5699 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5700 sub_strict_overflow_p = false;
5701 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5702 &sub_strict_overflow_p)) != 0
5703 && (t2 = extract_muldiv (op1, c, code, wide_type,
5704 &sub_strict_overflow_p)) != 0)
5705 {
5706 if (tree_int_cst_sgn (c) < 0)
5707 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5708 if (sub_strict_overflow_p)
5709 *strict_overflow_p = true;
5710 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5711 fold_convert (ctype, t2));
5712 }
5713 break;
5714
5715 case LSHIFT_EXPR: case RSHIFT_EXPR:
5716 /* If the second operand is constant, this is a multiplication
5717 or floor division by a power of two, so we can treat it that
5718 way unless the multiplier or divisor overflows. Signed
5719 left-shift overflow is implementation-defined rather than
5720 undefined in C90, so do not convert signed left shift into
5721 multiplication. */
5722 if (TREE_CODE (op1) == INTEGER_CST
5723 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5724 /* const_binop may not detect overflow correctly,
5725 so check for it explicitly here. */
5726 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5727 && TREE_INT_CST_HIGH (op1) == 0
5728 && 0 != (t1 = fold_convert (ctype,
5729 const_binop (LSHIFT_EXPR,
5730 size_one_node,
5731 op1)))
5732 && !TREE_OVERFLOW (t1))
5733 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5734 ? MULT_EXPR : FLOOR_DIV_EXPR,
5735 ctype,
5736 fold_convert (ctype, op0),
5737 t1),
5738 c, code, wide_type, strict_overflow_p);
5739 break;
5740
5741 case PLUS_EXPR: case MINUS_EXPR:
5742 /* See if we can eliminate the operation on both sides. If we can, we
5743 can return a new PLUS or MINUS. If we can't, the only remaining
5744 cases where we can do anything are if the second operand is a
5745 constant. */
5746 sub_strict_overflow_p = false;
5747 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5748 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5749 if (t1 != 0 && t2 != 0
5750 && (code == MULT_EXPR
5751 /* If not multiplication, we can only do this if both operands
5752 are divisible by c. */
5753 || (multiple_of_p (ctype, op0, c)
5754 && multiple_of_p (ctype, op1, c))))
5755 {
5756 if (sub_strict_overflow_p)
5757 *strict_overflow_p = true;
5758 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5759 fold_convert (ctype, t2));
5760 }
5761
5762 /* If this was a subtraction, negate OP1 and set it to be an addition.
5763 This simplifies the logic below. */
5764 if (tcode == MINUS_EXPR)
5765 {
5766 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5767 /* If OP1 was not easily negatable, the constant may be OP0. */
5768 if (TREE_CODE (op0) == INTEGER_CST)
5769 {
5770 tree tem = op0;
5771 op0 = op1;
5772 op1 = tem;
5773 tem = t1;
5774 t1 = t2;
5775 t2 = tem;
5776 }
5777 }
5778
5779 if (TREE_CODE (op1) != INTEGER_CST)
5780 break;
5781
5782 /* If either OP1 or C is negative, this optimization is not safe for
5783 some of the division and remainder types, while for others we need
5784 to change the code. */
5785 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5786 {
5787 if (code == CEIL_DIV_EXPR)
5788 code = FLOOR_DIV_EXPR;
5789 else if (code == FLOOR_DIV_EXPR)
5790 code = CEIL_DIV_EXPR;
5791 else if (code != MULT_EXPR
5792 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5793 break;
5794 }
5795
5796 /* If this is a multiply, or if OP1 is a multiple of our constant for
5797 a division/modulus, do the operation and verify it doesn't overflow. */
5798 if (code == MULT_EXPR
5799 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5800 {
5801 op1 = const_binop (code, fold_convert (ctype, op1),
5802 fold_convert (ctype, c));
5803 /* We allow the constant to overflow with wrapping semantics. */
5804 if (op1 == 0
5805 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5806 break;
5807 }
5808 else
5809 break;
5810
5811 /* If we have an unsigned type that is not a sizetype, we cannot widen
5812 the operation since it will change the result if the original
5813 computation overflowed. */
5814 if (TYPE_UNSIGNED (ctype)
5815 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5816 && ctype != type)
5817 break;
5818
5819 /* If we were able to eliminate our operation from the first side,
5820 apply our operation to the second side and reform the PLUS. */
5821 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5822 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5823
5824 /* The last case is if we are a multiply. In that case, we can
5825 apply the distributive law to commute the multiply and addition
5826 if the multiplication of the constants doesn't overflow. */
5827 if (code == MULT_EXPR)
5828 return fold_build2 (tcode, ctype,
5829 fold_build2 (code, ctype,
5830 fold_convert (ctype, op0),
5831 fold_convert (ctype, c)),
5832 op1);
5833
5834 break;
5835
5836 case MULT_EXPR:
5837 /* We have a special case here if we are doing something like
5838 (C * 8) % 4 since we know that's zero. */
5839 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5840 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5841 /* If the multiplication can overflow we cannot optimize this.
5842 ??? Until we can properly mark individual operations as
5843 not overflowing we need to treat sizetype special here as
5844 stor-layout relies on this optimization to make
5845 DECL_FIELD_BIT_OFFSET always a constant. */
5846 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5847 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5848 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5849 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5850 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5851 {
5852 *strict_overflow_p = true;
5853 return omit_one_operand (type, integer_zero_node, op0);
5854 }
5855
5856 /* ... fall through ... */
5857
5858 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5859 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5860 /* If we can extract our operation from the LHS, do so and return a
5861 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5862 do something only if the second operand is a constant. */
5863 if (same_p
5864 && (t1 = extract_muldiv (op0, c, code, wide_type,
5865 strict_overflow_p)) != 0)
5866 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5867 fold_convert (ctype, op1));
5868 else if (tcode == MULT_EXPR && code == MULT_EXPR
5869 && (t1 = extract_muldiv (op1, c, code, wide_type,
5870 strict_overflow_p)) != 0)
5871 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5872 fold_convert (ctype, t1));
5873 else if (TREE_CODE (op1) != INTEGER_CST)
5874 return 0;
5875
5876 /* If these are the same operation types, we can associate them
5877 assuming no overflow. */
5878 if (tcode == code)
5879 {
5880 double_int mul;
5881 int overflow_p;
5882 mul = double_int_mul_with_sign
5883 (double_int_ext
5884 (tree_to_double_int (op1),
5885 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5886 double_int_ext
5887 (tree_to_double_int (c),
5888 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5889 false, &overflow_p);
5890 overflow_p = (((!TYPE_UNSIGNED (ctype)
5891 || (TREE_CODE (ctype) == INTEGER_TYPE
5892 && TYPE_IS_SIZETYPE (ctype)))
5893 && overflow_p)
5894 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5895 if (!double_int_fits_to_tree_p (ctype, mul)
5896 && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5897 || !TYPE_UNSIGNED (ctype)
5898 || (TREE_CODE (ctype) == INTEGER_TYPE
5899 && TYPE_IS_SIZETYPE (ctype))))
5900 overflow_p = 1;
5901 if (!overflow_p)
5902 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5903 double_int_to_tree (ctype, mul));
5904 }
5905
5906 /* If these operations "cancel" each other, we have the main
5907 optimizations of this pass, which occur when either constant is a
5908 multiple of the other, in which case we replace this with an
5909 operation of either CODE or TCODE.
5910
5911 If we have an unsigned type that is not a sizetype, we cannot do
5912 this since it will change the result if the original computation
5913 overflowed. */
5914 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5915 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5916 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5917 || (tcode == MULT_EXPR
5918 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5919 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5920 && code != MULT_EXPR)))
5921 {
5922 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5923 {
5924 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5925 *strict_overflow_p = true;
5926 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5927 fold_convert (ctype,
5928 const_binop (TRUNC_DIV_EXPR,
5929 op1, c)));
5930 }
5931 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5932 {
5933 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5934 *strict_overflow_p = true;
5935 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5936 fold_convert (ctype,
5937 const_binop (TRUNC_DIV_EXPR,
5938 c, op1)));
5939 }
5940 }
5941 break;
5942
5943 default:
5944 break;
5945 }
5946
5947 return 0;
5948 }
5949 \f
5950 /* Return a node which has the indicated constant VALUE (either 0 or
5951 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5952 and is of the indicated TYPE. */
5953
5954 tree
5955 constant_boolean_node (bool value, tree type)
5956 {
5957 if (type == integer_type_node)
5958 return value ? integer_one_node : integer_zero_node;
5959 else if (type == boolean_type_node)
5960 return value ? boolean_true_node : boolean_false_node;
5961 else if (TREE_CODE (type) == VECTOR_TYPE)
5962 return build_vector_from_val (type,
5963 build_int_cst (TREE_TYPE (type),
5964 value ? -1 : 0));
5965 else
5966 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5967 }
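
/* For illustration: constant_boolean_node (true, boolean_type_node)
   returns boolean_true_node, while for a vector comparison type it
   returns a vector of all-ones elements such as { -1, -1, -1, -1 },
   matching the encoding of vector truth values used above. */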
5968
5969
5970 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5971 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5972 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5973 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then
5974 COND is the first argument to CODE; otherwise (as in the example
5975 given here), it is the second argument. TYPE is the type of the
5976 original expression. Return NULL_TREE if no simplification is
5977 possible. */
5978
5979 static tree
5980 fold_binary_op_with_conditional_arg (location_t loc,
5981 enum tree_code code,
5982 tree type, tree op0, tree op1,
5983 tree cond, tree arg, int cond_first_p)
5984 {
5985 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5986 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5987 tree test, true_value, false_value;
5988 tree lhs = NULL_TREE;
5989 tree rhs = NULL_TREE;
5990
5991 if (TREE_CODE (cond) == COND_EXPR)
5992 {
5993 test = TREE_OPERAND (cond, 0);
5994 true_value = TREE_OPERAND (cond, 1);
5995 false_value = TREE_OPERAND (cond, 2);
5996 /* If this operand is an expression that throws (and thus has void
5997 type), it does not make sense to try to perform a logical or
5998 arithmetic operation involving it. */
5999 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6000 lhs = true_value;
6001 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6002 rhs = false_value;
6003 }
6004 else
6005 {
6006 tree testtype = TREE_TYPE (cond);
6007 test = cond;
6008 true_value = constant_boolean_node (true, testtype);
6009 false_value = constant_boolean_node (false, testtype);
6010 }
6011
6012 /* This transformation is only worthwhile if we don't have to wrap ARG
6013 in a SAVE_EXPR and the operation can be simplified on at least one
6014 of the branches once it is pushed inside the COND_EXPR. */
6015 if (!TREE_CONSTANT (arg)
6016 && (TREE_SIDE_EFFECTS (arg)
6017 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6018 return NULL_TREE;
6019
6020 arg = fold_convert_loc (loc, arg_type, arg);
6021 if (lhs == 0)
6022 {
6023 true_value = fold_convert_loc (loc, cond_type, true_value);
6024 if (cond_first_p)
6025 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6026 else
6027 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6028 }
6029 if (rhs == 0)
6030 {
6031 false_value = fold_convert_loc (loc, cond_type, false_value);
6032 if (cond_first_p)
6033 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6034 else
6035 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6036 }
6037
6038 /* Check that we have simplified at least one of the branches. */
6039 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6040 return NULL_TREE;
6041
6042 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6043 }
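
/* For illustration (hypothetical expression): `4 + (b ? x : 0)' becomes
   `b ? 4 + x : 4'. The fold is accepted because ARG is constant, so no
   SAVE_EXPR is needed, and the false branch simplifies to a constant. */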
6044
6045 \f
6046 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6047
6048 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6049 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6050 ADDEND is the same as X.
6051
6052 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6053 and finite. The problematic cases are when X is zero, and its mode
6054 has signed zeros. In the case of rounding towards -infinity,
6055 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6056 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6057
6058 bool
6059 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6060 {
6061 if (!real_zerop (addend))
6062 return false;
6063
6064 /* Don't allow the fold with -fsignaling-nans. */
6065 if (HONOR_SNANS (TYPE_MODE (type)))
6066 return false;
6067
6068 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6069 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6070 return true;
6071
6072 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6073 if (TREE_CODE (addend) == REAL_CST
6074 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6075 negate = !negate;
6076
6077 /* The mode has signed zeros, and we have to honor their sign.
6078 In this situation, there is only one case we can return true for.
6079 X - 0 is the same as X unless rounding towards -infinity is
6080 supported. */
6081 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6082 }
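
/* For illustration: when signed zeros are honored, X + 0.0 cannot be
   folded to X (for X == -0.0 the sum is +0.0), but X - 0.0 can be,
   provided sign-dependent rounding is not honored; with
   -fno-signed-zeros both folds are allowed. */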
6083
6084 /* Subroutine of fold() that checks comparisons of built-in math
6085 functions against real constants.
6086
6087 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6088 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6089 is the type of the result and ARG0 and ARG1 are the operands of the
6090 comparison. ARG1 must be a TREE_REAL_CST.
6091
6092 The function returns the constant folded tree if a simplification
6093 can be made, and NULL_TREE otherwise. */
6094
6095 static tree
6096 fold_mathfn_compare (location_t loc,
6097 enum built_in_function fcode, enum tree_code code,
6098 tree type, tree arg0, tree arg1)
6099 {
6100 REAL_VALUE_TYPE c;
6101
6102 if (BUILTIN_SQRT_P (fcode))
6103 {
6104 tree arg = CALL_EXPR_ARG (arg0, 0);
6105 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6106
6107 c = TREE_REAL_CST (arg1);
6108 if (REAL_VALUE_NEGATIVE (c))
6109 {
6110 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are always false, if y is negative. */
6111 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6112 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6113
6114 /* sqrt(x) > y is always true, if y is negative and we
6115 don't care about NaNs, i.e. negative values of x. */
6116 if (code == NE_EXPR || !HONOR_NANS (mode))
6117 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6118
6119 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6120 return fold_build2_loc (loc, GE_EXPR, type, arg,
6121 build_real (TREE_TYPE (arg), dconst0));
6122 }
6123 else if (code == GT_EXPR || code == GE_EXPR)
6124 {
6125 REAL_VALUE_TYPE c2;
6126
6127 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6128 real_convert (&c2, mode, &c2);
6129
6130 if (REAL_VALUE_ISINF (c2))
6131 {
6132 /* sqrt(x) > y is x == +Inf, when y is very large. */
6133 if (HONOR_INFINITIES (mode))
6134 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6135 build_real (TREE_TYPE (arg), c2));
6136
6137 /* sqrt(x) > y is always false, when y is very large
6138 and we don't care about infinities. */
6139 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6140 }
6141
6142 /* sqrt(x) > c is the same as x > c*c. */
6143 return fold_build2_loc (loc, code, type, arg,
6144 build_real (TREE_TYPE (arg), c2));
6145 }
6146 else if (code == LT_EXPR || code == LE_EXPR)
6147 {
6148 REAL_VALUE_TYPE c2;
6149
6150 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6151 real_convert (&c2, mode, &c2);
6152
6153 if (REAL_VALUE_ISINF (c2))
6154 {
6155 /* sqrt(x) < y is always true, when y is a very large
6156 value and we don't care about NaNs or Infinities. */
6157 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6158 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6159
6160 /* sqrt(x) < y is x != +Inf when y is very large and we
6161 don't care about NaNs. */
6162 if (! HONOR_NANS (mode))
6163 return fold_build2_loc (loc, NE_EXPR, type, arg,
6164 build_real (TREE_TYPE (arg), c2));
6165
6166 /* sqrt(x) < y is x >= 0 when y is very large and we
6167 don't care about Infinities. */
6168 if (! HONOR_INFINITIES (mode))
6169 return fold_build2_loc (loc, GE_EXPR, type, arg,
6170 build_real (TREE_TYPE (arg), dconst0));
6171
6172 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6173 arg = save_expr (arg);
6174 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6175 fold_build2_loc (loc, GE_EXPR, type, arg,
6176 build_real (TREE_TYPE (arg),
6177 dconst0)),
6178 fold_build2_loc (loc, NE_EXPR, type, arg,
6179 build_real (TREE_TYPE (arg),
6180 c2)));
6181 }
6182
6183 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6184 if (! HONOR_NANS (mode))
6185 return fold_build2_loc (loc, code, type, arg,
6186 build_real (TREE_TYPE (arg), c2));
6187
6188 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6189 arg = save_expr (arg);
6190 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6191 fold_build2_loc (loc, GE_EXPR, type, arg,
6192 build_real (TREE_TYPE (arg),
6193 dconst0)),
6194 fold_build2_loc (loc, code, type, arg,
6195 build_real (TREE_TYPE (arg),
6196 c2)));
6197 }
6198 }
6199
6200 return NULL_TREE;
6201 }
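
/* For illustration (hypothetical values): sqrt (x) > 2.0 folds to
   x > 4.0 via the GT_EXPR case above, since the square 4.0 is finite
   and a negative x makes both forms false; sqrt (x) < -1.0 is always
   false and folds to 0. */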
6202
6203 /* Subroutine of fold() that optimizes comparisons against Infinities,
6204 either +Inf or -Inf.
6205
6206 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6207 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6208 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6209
6210 The function returns the constant folded tree if a simplification
6211 can be made, and NULL_TREE otherwise. */
6212
6213 static tree
6214 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6215 tree arg0, tree arg1)
6216 {
6217 enum machine_mode mode;
6218 REAL_VALUE_TYPE max;
6219 tree temp;
6220 bool neg;
6221
6222 mode = TYPE_MODE (TREE_TYPE (arg0));
6223
6224 /* For negative infinity swap the sense of the comparison. */
6225 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6226 if (neg)
6227 code = swap_tree_comparison (code);
6228
6229 switch (code)
6230 {
6231 case GT_EXPR:
6232 /* x > +Inf is always false, if we ignore sNaNs. */
6233 if (HONOR_SNANS (mode))
6234 return NULL_TREE;
6235 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6236
6237 case LE_EXPR:
6238 /* x <= +Inf is always true, if we don't care about NaNs. */
6239 if (! HONOR_NANS (mode))
6240 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6241
6242 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6243 arg0 = save_expr (arg0);
6244 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6245
6246 case EQ_EXPR:
6247 case GE_EXPR:
6248 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6249 real_maxval (&max, neg, mode);
6250 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6251 arg0, build_real (TREE_TYPE (arg0), max));
6252
6253 case LT_EXPR:
6254 /* x < +Inf is always equal to x <= DBL_MAX. */
6255 real_maxval (&max, neg, mode);
6256 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6257 arg0, build_real (TREE_TYPE (arg0), max));
6258
6259 case NE_EXPR:
6260 /* x != +Inf is always equal to !(x > DBL_MAX). */
6261 real_maxval (&max, neg, mode);
6262 if (! HONOR_NANS (mode))
6263 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6264 arg0, build_real (TREE_TYPE (arg0), max));
6265
6266 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6267 arg0, build_real (TREE_TYPE (arg0), max));
6268 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6269
6270 default:
6271 break;
6272 }
6273
6274 return NULL_TREE;
6275 }
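
/* For illustration: for a double operand, x < +Inf folds to
   x <= DBL_MAX and x >= +Inf folds to x > DBL_MAX; for -Inf the
   comparison sense is swapped first, so x > -Inf becomes
   x >= -DBL_MAX. */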
6276
6277 /* Subroutine of fold() that optimizes comparisons of a division by
6278 a nonzero integer constant against an integer constant, i.e.
6279 X/C1 op C2.
6280
6281 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6282 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6283 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6284
6285 The function returns the constant folded tree if a simplification
6286 can be made, and NULL_TREE otherwise. */
6287
6288 static tree
6289 fold_div_compare (location_t loc,
6290 enum tree_code code, tree type, tree arg0, tree arg1)
6291 {
6292 tree prod, tmp, hi, lo;
6293 tree arg00 = TREE_OPERAND (arg0, 0);
6294 tree arg01 = TREE_OPERAND (arg0, 1);
6295 double_int val;
6296 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6297 bool neg_overflow;
6298 int overflow;
6299
6300 /* We have to do this the hard way to detect unsigned overflow.
6301 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6302 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6303 TREE_INT_CST_HIGH (arg01),
6304 TREE_INT_CST_LOW (arg1),
6305 TREE_INT_CST_HIGH (arg1),
6306 &val.low, &val.high, unsigned_p);
6307 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6308 neg_overflow = false;
6309
6310 if (unsigned_p)
6311 {
6312 tmp = int_const_binop (MINUS_EXPR, arg01,
6313 build_int_cst (TREE_TYPE (arg01), 1));
6314 lo = prod;
6315
6316 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6317 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6318 TREE_INT_CST_HIGH (prod),
6319 TREE_INT_CST_LOW (tmp),
6320 TREE_INT_CST_HIGH (tmp),
6321 &val.low, &val.high, unsigned_p);
6322 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6323 -1, overflow | TREE_OVERFLOW (prod));
6324 }
6325 else if (tree_int_cst_sgn (arg01) >= 0)
6326 {
6327 tmp = int_const_binop (MINUS_EXPR, arg01,
6328 build_int_cst (TREE_TYPE (arg01), 1));
6329 switch (tree_int_cst_sgn (arg1))
6330 {
6331 case -1:
6332 neg_overflow = true;
6333 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6334 hi = prod;
6335 break;
6336
6337 case 0:
6338 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6339 hi = tmp;
6340 break;
6341
6342 case 1:
6343 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6344 lo = prod;
6345 break;
6346
6347 default:
6348 gcc_unreachable ();
6349 }
6350 }
6351 else
6352 {
6353 /* A negative divisor reverses the relational operators. */
6354 code = swap_tree_comparison (code);
6355
6356 tmp = int_const_binop (PLUS_EXPR, arg01,
6357 build_int_cst (TREE_TYPE (arg01), 1));
6358 switch (tree_int_cst_sgn (arg1))
6359 {
6360 case -1:
6361 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6362 lo = prod;
6363 break;
6364
6365 case 0:
6366 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6367 lo = tmp;
6368 break;
6369
6370 case 1:
6371 neg_overflow = true;
6372 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6373 hi = prod;
6374 break;
6375
6376 default:
6377 gcc_unreachable ();
6378 }
6379 }
6380
6381 switch (code)
6382 {
6383 case EQ_EXPR:
6384 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6385 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6386 if (TREE_OVERFLOW (hi))
6387 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6388 if (TREE_OVERFLOW (lo))
6389 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6390 return build_range_check (loc, type, arg00, 1, lo, hi);
6391
6392 case NE_EXPR:
6393 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6394 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6395 if (TREE_OVERFLOW (hi))
6396 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6397 if (TREE_OVERFLOW (lo))
6398 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6399 return build_range_check (loc, type, arg00, 0, lo, hi);
6400
6401 case LT_EXPR:
6402 if (TREE_OVERFLOW (lo))
6403 {
6404 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6405 return omit_one_operand_loc (loc, type, tmp, arg00);
6406 }
6407 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6408
6409 case LE_EXPR:
6410 if (TREE_OVERFLOW (hi))
6411 {
6412 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6413 return omit_one_operand_loc (loc, type, tmp, arg00);
6414 }
6415 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6416
6417 case GT_EXPR:
6418 if (TREE_OVERFLOW (hi))
6419 {
6420 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6421 return omit_one_operand_loc (loc, type, tmp, arg00);
6422 }
6423 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6424
6425 case GE_EXPR:
6426 if (TREE_OVERFLOW (lo))
6427 {
6428 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6429 return omit_one_operand_loc (loc, type, tmp, arg00);
6430 }
6431 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6432
6433 default:
6434 break;
6435 }
6436
6437 return NULL_TREE;
6438 }
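
/* For illustration (hypothetical values): for signed X, X / 4 == 2
   holds exactly for X in [8, 11] (prod == 8, hi == prod + 3), so the
   EQ_EXPR case builds the range check 8 <= X && X <= 11, and
   X / 4 < 2 folds to X < 8. */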
6439
6440
6441 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6442 equality/inequality test, then return a simplified form of the test
6443 using a sign test. Otherwise return NULL. RESULT_TYPE is the
6444 desired result type. */
6445
6446 static tree
6447 fold_single_bit_test_into_sign_test (location_t loc,
6448 enum tree_code code, tree arg0, tree arg1,
6449 tree result_type)
6450 {
6451 /* If this is testing a single bit, we can optimize the test. */
6452 if ((code == NE_EXPR || code == EQ_EXPR)
6453 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6454 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6455 {
6456 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6457 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6458 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6459
6460 if (arg00 != NULL_TREE
6461 /* This is only a win if casting to a signed type is cheap,
6462 i.e. when arg00's type is not a partial mode. */
6463 && TYPE_PRECISION (TREE_TYPE (arg00))
6464 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6465 {
6466 tree stype = signed_type_for (TREE_TYPE (arg00));
6467 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6468 result_type,
6469 fold_convert_loc (loc, stype, arg00),
6470 build_int_cst (stype, 0));
6471 }
6472 }
6473
6474 return NULL_TREE;
6475 }
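
/* For illustration (hypothetical 32-bit unsigned x): `(x & 0x80000000)
   != 0' tests exactly the sign bit, so it folds to `(int) x < 0', and
   the corresponding == 0 form folds to `(int) x >= 0'. */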
6476
6477 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6478 equality/inequality test, then return a simplified form of
6479 the test using shifts and logical operations. Otherwise return
6480 NULL. RESULT_TYPE is the desired result type. */
6481
6482 tree
6483 fold_single_bit_test (location_t loc, enum tree_code code,
6484 tree arg0, tree arg1, tree result_type)
6485 {
6486 /* If this is testing a single bit, we can optimize the test. */
6487 if ((code == NE_EXPR || code == EQ_EXPR)
6488 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6489 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6490 {
6491 tree inner = TREE_OPERAND (arg0, 0);
6492 tree type = TREE_TYPE (arg0);
6493 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6494 enum machine_mode operand_mode = TYPE_MODE (type);
6495 int ops_unsigned;
6496 tree signed_type, unsigned_type, intermediate_type;
6497 tree tem, one;
6498
6499 /* First, see if we can fold the single bit test into a sign-bit
6500 test. */
6501 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6502 result_type);
6503 if (tem)
6504 return tem;
6505
6506 /* Otherwise we have (A & C) != 0 where C is a single bit;
6507 convert that into ((A >> C2) & 1), where C2 = log2(C).
6508 Similarly for (A & C) == 0. */
6509
6510 /* If INNER is a right shift by a constant and the shift count plus
6511 BITNUM does not overflow, adjust BITNUM and INNER. */
6512 if (TREE_CODE (inner) == RSHIFT_EXPR
6513 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6514 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6515 && bitnum < TYPE_PRECISION (type)
6516 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6517 bitnum - TYPE_PRECISION (type)))
6518 {
6519 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6520 inner = TREE_OPERAND (inner, 0);
6521 }
6522
6523 /* If we are going to be able to omit the AND below, we must do our
6524 operations as unsigned. If we must use the AND, we have a choice.
6525 Normally unsigned is faster, but for some machines signed is. */
6526 #ifdef LOAD_EXTEND_OP
6527 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6528 && !flag_syntax_only) ? 0 : 1;
6529 #else
6530 ops_unsigned = 1;
6531 #endif
6532
6533 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6534 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6535 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6536 inner = fold_convert_loc (loc, intermediate_type, inner);
6537
6538 if (bitnum != 0)
6539 inner = build2 (RSHIFT_EXPR, intermediate_type,
6540 inner, size_int (bitnum));
6541
6542 one = build_int_cst (intermediate_type, 1);
6543
6544 if (code == EQ_EXPR)
6545 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6546
6547 /* Put the AND last so it can combine with more things. */
6548 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6549
6550 /* Make sure to return the proper type. */
6551 inner = fold_convert_loc (loc, result_type, inner);
6552
6553 return inner;
6554 }
6555 return NULL_TREE;
6556 }
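
/* For illustration (hypothetical values): `(x & 8) != 0' becomes
   `((unsigned) x >> 3) & 1', and `(x & 8) == 0' additionally XORs the
   shifted bit with 1 before the final AND, yielding its complement. */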
6557
6558 /* Check whether we are allowed to reorder operands arg0 and arg1,
6559 such that the evaluation of arg1 occurs before arg0. */
6560
6561 static bool
6562 reorder_operands_p (const_tree arg0, const_tree arg1)
6563 {
6564 if (! flag_evaluation_order)
6565 return true;
6566 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6567 return true;
6568 return ! TREE_SIDE_EFFECTS (arg0)
6569 && ! TREE_SIDE_EFFECTS (arg1);
6570 }
6571
6572 /* Test whether it is preferable to swap two operands, ARG0 and
6573 ARG1, for example because ARG0 is an integer constant and ARG1
6574 isn't. If REORDER is true, only recommend swapping if we can
6575 evaluate the operands in reverse order. */
6576
6577 bool
6578 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6579 {
6580 STRIP_SIGN_NOPS (arg0);
6581 STRIP_SIGN_NOPS (arg1);
6582
6583 if (TREE_CODE (arg1) == INTEGER_CST)
6584 return 0;
6585 if (TREE_CODE (arg0) == INTEGER_CST)
6586 return 1;
6587
6588 if (TREE_CODE (arg1) == REAL_CST)
6589 return 0;
6590 if (TREE_CODE (arg0) == REAL_CST)
6591 return 1;
6592
6593 if (TREE_CODE (arg1) == FIXED_CST)
6594 return 0;
6595 if (TREE_CODE (arg0) == FIXED_CST)
6596 return 1;
6597
6598 if (TREE_CODE (arg1) == COMPLEX_CST)
6599 return 0;
6600 if (TREE_CODE (arg0) == COMPLEX_CST)
6601 return 1;
6602
6603 if (TREE_CONSTANT (arg1))
6604 return 0;
6605 if (TREE_CONSTANT (arg0))
6606 return 1;
6607
6608 if (optimize_function_for_size_p (cfun))
6609 return 0;
6610
6611 if (reorder && flag_evaluation_order
6612 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6613 return 0;
6614
6615 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6616 for commutative and comparison operators. Ensuring a canonical
6617 form allows the optimizers to find additional redundancies without
6618 having to explicitly check for both orderings. */
6619 if (TREE_CODE (arg0) == SSA_NAME
6620 && TREE_CODE (arg1) == SSA_NAME
6621 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6622 return 1;
6623
6624 /* Put SSA_NAMEs last. */
6625 if (TREE_CODE (arg1) == SSA_NAME)
6626 return 0;
6627 if (TREE_CODE (arg0) == SSA_NAME)
6628 return 1;
6629
6630 /* Put variables last. */
6631 if (DECL_P (arg1))
6632 return 0;
6633 if (DECL_P (arg0))
6634 return 1;
6635
6636 return 0;
6637 }
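
/* For illustration: for `5 + x' this predicate returns 1 because ARG0
   is an INTEGER_CST and ARG1 is not, so callers canonicalize the
   expression to `x + 5', keeping constants in the second operand. */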
6638
6639 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6640 ARG0 is extended to a wider type. */
6641
6642 static tree
6643 fold_widened_comparison (location_t loc, enum tree_code code,
6644 tree type, tree arg0, tree arg1)
6645 {
6646 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6647 tree arg1_unw;
6648 tree shorter_type, outer_type;
6649 tree min, max;
6650 bool above, below;
6651
6652 if (arg0_unw == arg0)
6653 return NULL_TREE;
6654 shorter_type = TREE_TYPE (arg0_unw);
6655
6656 #ifdef HAVE_canonicalize_funcptr_for_compare
6657 /* Disable this optimization if we're casting a function pointer
6658 type on targets that require function pointer canonicalization. */
6659 if (HAVE_canonicalize_funcptr_for_compare
6660 && TREE_CODE (shorter_type) == POINTER_TYPE
6661 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6662 return NULL_TREE;
6663 #endif
6664
6665 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6666 return NULL_TREE;
6667
6668 arg1_unw = get_unwidened (arg1, NULL_TREE);
6669
6670 /* If possible, express the comparison in the shorter mode. */
6671 if ((code == EQ_EXPR || code == NE_EXPR
6672 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6673 && (TREE_TYPE (arg1_unw) == shorter_type
6674 || ((TYPE_PRECISION (shorter_type)
6675 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6676 && (TYPE_UNSIGNED (shorter_type)
6677 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6678 || (TREE_CODE (arg1_unw) == INTEGER_CST
6679 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6680 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6681 && int_fits_type_p (arg1_unw, shorter_type))))
6682 return fold_build2_loc (loc, code, type, arg0_unw,
6683 fold_convert_loc (loc, shorter_type, arg1_unw));
6684
6685 if (TREE_CODE (arg1_unw) != INTEGER_CST
6686 || TREE_CODE (shorter_type) != INTEGER_TYPE
6687 || !int_fits_type_p (arg1_unw, shorter_type))
6688 return NULL_TREE;
6689
6690 /* If we are comparing with an integer that does not fit into the range
6691 of the shorter type, the result is known. */
6692 outer_type = TREE_TYPE (arg1_unw);
6693 min = lower_bound_in_type (outer_type, shorter_type);
6694 max = upper_bound_in_type (outer_type, shorter_type);
6695
6696 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6697 max, arg1_unw));
6698 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6699 arg1_unw, min));
6700
6701 switch (code)
6702 {
6703 case EQ_EXPR:
6704 if (above || below)
6705 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6706 break;
6707
6708 case NE_EXPR:
6709 if (above || below)
6710 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6711 break;
6712
6713 case LT_EXPR:
6714 case LE_EXPR:
6715 if (above)
6716 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6717 else if (below)
6718 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6719 break;
6720 case GT_EXPR:
6721 case GE_EXPR:
6722 if (above)
6723 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6724 else if (below)
6725 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6726 break;
6727 default:
6728 break;
6729 }
6730
6731 return NULL_TREE;
6732 }
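
/* For illustration (hypothetical types): for a signed char c,
   `(int) c == 300' folds to 0 because 300 lies above 127, the upper
   bound of the narrow type, and `(int) c < 300' folds to 1 for the
   same reason. */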
6733
6734 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6735 ARG0 just the signedness is changed. */
6736
6737 static tree
6738 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6739 tree arg0, tree arg1)
6740 {
6741 tree arg0_inner;
6742 tree inner_type, outer_type;
6743
6744 if (!CONVERT_EXPR_P (arg0))
6745 return NULL_TREE;
6746
6747 outer_type = TREE_TYPE (arg0);
6748 arg0_inner = TREE_OPERAND (arg0, 0);
6749 inner_type = TREE_TYPE (arg0_inner);
6750
6751 #ifdef HAVE_canonicalize_funcptr_for_compare
6752 /* Disable this optimization if we're casting a function pointer
6753 type on targets that require function pointer canonicalization. */
6754 if (HAVE_canonicalize_funcptr_for_compare
6755 && TREE_CODE (inner_type) == POINTER_TYPE
6756 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6757 return NULL_TREE;
6758 #endif
6759
6760 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6761 return NULL_TREE;
6762
6763 if (TREE_CODE (arg1) != INTEGER_CST
6764 && !(CONVERT_EXPR_P (arg1)
6765 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6766 return NULL_TREE;
6767
6768 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6769 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6770 && code != NE_EXPR
6771 && code != EQ_EXPR)
6772 return NULL_TREE;
6773
6774 if (TREE_CODE (arg1) == INTEGER_CST)
6775 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6776 0, TREE_OVERFLOW (arg1));
6777 else
6778 arg1 = fold_convert_loc (loc, inner_type, arg1);
6779
6780 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6781 }
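
/* For illustration (hypothetical types): for an int i, the test
   `(unsigned) i == 5u' only changes the signedness of i, so it folds
   to `i == 5'; an ordering test such as `(unsigned) i < 5u' is left
   alone because the sign change alters its meaning. */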
6782
6783 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6784 the step of the array. Reconstructs s and delta in the case of s *
6785 delta being an integer constant (and thus already folded). ADDR is
6786 the address. OP1 is the multiplicative expression. If the
6787 function succeeds, the new address expression is returned.
6788 Otherwise NULL_TREE is returned. LOC is the location of the
6789 resulting expression. */
6790
6791 static tree
6792 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6793 {
6794 tree s, delta, step;
6795 tree ref = TREE_OPERAND (addr, 0), pref;
6796 tree ret, pos;
6797 tree itype;
6798 bool mdim = false;
6799
6800 /* Strip the nops that might be added when converting op1 to sizetype. */
6801 STRIP_NOPS (op1);
6802
6803 /* Canonicalize op1 into a possibly non-constant delta
6804 and an INTEGER_CST s. */
6805 if (TREE_CODE (op1) == MULT_EXPR)
6806 {
6807 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6808
6809 STRIP_NOPS (arg0);
6810 STRIP_NOPS (arg1);
6811
6812 if (TREE_CODE (arg0) == INTEGER_CST)
6813 {
6814 s = arg0;
6815 delta = arg1;
6816 }
6817 else if (TREE_CODE (arg1) == INTEGER_CST)
6818 {
6819 s = arg1;
6820 delta = arg0;
6821 }
6822 else
6823 return NULL_TREE;
6824 }
6825 else if (TREE_CODE (op1) == INTEGER_CST)
6826 {
6827 delta = op1;
6828 s = NULL_TREE;
6829 }
6830 else
6831 {
6832 /* Treat op1 as delta * 1. */
6833 delta = op1;
6834 s = integer_one_node;
6835 }
6836
6837 /* Handle &x.array the same as we would handle &x.array[0]. */
6838 if (TREE_CODE (ref) == COMPONENT_REF
6839 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6840 {
6841 tree domain;
6842
6843 /* Remember if this was a multi-dimensional array. */
6844 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6845 mdim = true;
6846
6847 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6848 if (! domain)
6849 goto cont;
6850 itype = TREE_TYPE (domain);
6851
6852 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6853 if (TREE_CODE (step) != INTEGER_CST)
6854 goto cont;
6855
6856 if (s)
6857 {
6858 if (! tree_int_cst_equal (step, s))
6859 goto cont;
6860 }
6861 else
6862 {
6863 /* Check whether delta is a multiple of step. */
6864 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6865 if (! tmp)
6866 goto cont;
6867 delta = tmp;
6868 }
6869
6870 /* Only fold here if we can verify we do not overflow one
6871 dimension of a multi-dimensional array. */
6872 if (mdim)
6873 {
6874 tree tmp;
6875
6876 if (!TYPE_MIN_VALUE (domain)
6877 || !TYPE_MAX_VALUE (domain)
6878 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6879 goto cont;
6880
6881 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6882 fold_convert_loc (loc, itype,
6883 TYPE_MIN_VALUE (domain)),
6884 fold_convert_loc (loc, itype, delta));
6885 if (TREE_CODE (tmp) != INTEGER_CST
6886 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6887 goto cont;
6888 }
6889
6890 /* We found a suitable component reference. */
6891
6892 pref = TREE_OPERAND (addr, 0);
6893 ret = copy_node (pref);
6894 SET_EXPR_LOCATION (ret, loc);
6895
6896 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6897 fold_build2_loc
6898 (loc, PLUS_EXPR, itype,
6899 fold_convert_loc (loc, itype,
6900 TYPE_MIN_VALUE
6901 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6902 fold_convert_loc (loc, itype, delta)),
6903 NULL_TREE, NULL_TREE);
6904 return build_fold_addr_expr_loc (loc, ret);
6905 }
6906
6907 cont:
6908
6909 for (;; ref = TREE_OPERAND (ref, 0))
6910 {
6911 if (TREE_CODE (ref) == ARRAY_REF)
6912 {
6913 tree domain;
6914
6915 /* Remember if this was a multi-dimensional array. */
6916 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6917 mdim = true;
6918
6919 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6920 if (! domain)
6921 continue;
6922 itype = TREE_TYPE (domain);
6923
6924 step = array_ref_element_size (ref);
6925 if (TREE_CODE (step) != INTEGER_CST)
6926 continue;
6927
6928 if (s)
6929 {
6930 if (! tree_int_cst_equal (step, s))
6931 continue;
6932 }
6933 else
6934 {
6935 /* Check whether delta is a multiple of the step. */
6936 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6937 if (! tmp)
6938 continue;
6939 delta = tmp;
6940 }
6941
6942 /* Only fold here if we can verify we do not overflow one
6943 dimension of a multi-dimensional array. */
6944 if (mdim)
6945 {
6946 tree tmp;
6947
6948 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6949 || !TYPE_MAX_VALUE (domain)
6950 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6951 continue;
6952
6953 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6954 fold_convert_loc (loc, itype,
6955 TREE_OPERAND (ref, 1)),
6956 fold_convert_loc (loc, itype, delta));
6957 if (!tmp
6958 || TREE_CODE (tmp) != INTEGER_CST
6959 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6960 continue;
6961 }
6962
6963 break;
6964 }
6965 else
6966 mdim = false;
6967
6968 if (!handled_component_p (ref))
6969 return NULL_TREE;
6970 }
6971
6972 /* We found the suitable array reference. So copy everything up to it,
6973 and replace the index. */
6974
6975 pref = TREE_OPERAND (addr, 0);
6976 ret = copy_node (pref);
6977 SET_EXPR_LOCATION (ret, loc);
6978 pos = ret;
6979
6980 while (pref != ref)
6981 {
6982 pref = TREE_OPERAND (pref, 0);
6983 TREE_OPERAND (pos, 0) = copy_node (pref);
6984 pos = TREE_OPERAND (pos, 0);
6985 }
6986
6987 TREE_OPERAND (pos, 1)
6988 = fold_build2_loc (loc, PLUS_EXPR, itype,
6989 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6990 fold_convert_loc (loc, itype, delta));
6991 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6992 }
6993
6994
6995 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6996 means A >= Y && A != MAX, but in this case we know that
6997 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
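
/* A hedged example with hypothetical names: for BOUND "i < len" and
   INEQ "i + 1 > len", the difference (i + 1) - i folds to 1, so the
   result is "i >= len". */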
6998
6999 static tree
7000 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7001 {
7002 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7003
7004 if (TREE_CODE (bound) == LT_EXPR)
7005 a = TREE_OPERAND (bound, 0);
7006 else if (TREE_CODE (bound) == GT_EXPR)
7007 a = TREE_OPERAND (bound, 1);
7008 else
7009 return NULL_TREE;
7010
7011 typea = TREE_TYPE (a);
7012 if (!INTEGRAL_TYPE_P (typea)
7013 && !POINTER_TYPE_P (typea))
7014 return NULL_TREE;
7015
7016 if (TREE_CODE (ineq) == LT_EXPR)
7017 {
7018 a1 = TREE_OPERAND (ineq, 1);
7019 y = TREE_OPERAND (ineq, 0);
7020 }
7021 else if (TREE_CODE (ineq) == GT_EXPR)
7022 {
7023 a1 = TREE_OPERAND (ineq, 0);
7024 y = TREE_OPERAND (ineq, 1);
7025 }
7026 else
7027 return NULL_TREE;
7028
7029 if (TREE_TYPE (a1) != typea)
7030 return NULL_TREE;
7031
7032 if (POINTER_TYPE_P (typea))
7033 {
7034 /* Convert the pointers to integers before taking the difference. */
7035 tree ta = fold_convert_loc (loc, ssizetype, a);
7036 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7037 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7038 }
7039 else
7040 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7041
7042 if (!diff || !integer_onep (diff))
7043 return NULL_TREE;
7044
7045 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7046 }
7047
7048 /* Fold a sum or difference of at least one multiplication.
7049 Returns the folded tree or NULL if no simplification could be made. */
7050
7051 static tree
7052 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7053 tree arg0, tree arg1)
7054 {
7055 tree arg00, arg01, arg10, arg11;
7056 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7057
7058 /* (A * C) +- (B * C) -> (A+-B) * C.
7059 (A * C) +- A -> A * (C+-1).
7060 We are most concerned about the case where C is a constant,
7061 but other combinations show up during loop reduction. Since
7062 it is not difficult, try all four possibilities. */
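
/* A hedged sketch with hypothetical operands: (A * 4) + (B * 4)
   becomes (A + B) * 4, and (A * 4) + A becomes A * 5. The
   power-of-two branch below turns i * 12 + j * 4 into
   (i * 3 + j) * 4, exposing the common factor 4. */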
7063
7064 if (TREE_CODE (arg0) == MULT_EXPR)
7065 {
7066 arg00 = TREE_OPERAND (arg0, 0);
7067 arg01 = TREE_OPERAND (arg0, 1);
7068 }
7069 else if (TREE_CODE (arg0) == INTEGER_CST)
7070 {
7071 arg00 = build_one_cst (type);
7072 arg01 = arg0;
7073 }
7074 else
7075 {
7076 /* We cannot generate constant 1 for fract. */
7077 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7078 return NULL_TREE;
7079 arg00 = arg0;
7080 arg01 = build_one_cst (type);
7081 }
7082 if (TREE_CODE (arg1) == MULT_EXPR)
7083 {
7084 arg10 = TREE_OPERAND (arg1, 0);
7085 arg11 = TREE_OPERAND (arg1, 1);
7086 }
7087 else if (TREE_CODE (arg1) == INTEGER_CST)
7088 {
7089 arg10 = build_one_cst (type);
7090 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7091 the purpose of this canonicalization. */
7092 if (TREE_INT_CST_HIGH (arg1) == -1
7093 && negate_expr_p (arg1)
7094 && code == PLUS_EXPR)
7095 {
7096 arg11 = negate_expr (arg1);
7097 code = MINUS_EXPR;
7098 }
7099 else
7100 arg11 = arg1;
7101 }
7102 else
7103 {
7104 /* We cannot generate constant 1 for fract. */
7105 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7106 return NULL_TREE;
7107 arg10 = arg1;
7108 arg11 = build_one_cst (type);
7109 }
7110 same = NULL_TREE;
7111
7112 if (operand_equal_p (arg01, arg11, 0))
7113 same = arg01, alt0 = arg00, alt1 = arg10;
7114 else if (operand_equal_p (arg00, arg10, 0))
7115 same = arg00, alt0 = arg01, alt1 = arg11;
7116 else if (operand_equal_p (arg00, arg11, 0))
7117 same = arg00, alt0 = arg01, alt1 = arg10;
7118 else if (operand_equal_p (arg01, arg10, 0))
7119 same = arg01, alt0 = arg00, alt1 = arg11;
7120
7121 /* No identical multiplicands; see if we can find a common
7122 power-of-two factor in non-power-of-two multiplies. This
7123 can help in multi-dimensional array access. */
7124 else if (host_integerp (arg01, 0)
7125 && host_integerp (arg11, 0))
7126 {
7127 HOST_WIDE_INT int01, int11, tmp;
7128 bool swap = false;
7129 tree maybe_same;
7130 int01 = TREE_INT_CST_LOW (arg01);
7131 int11 = TREE_INT_CST_LOW (arg11);
7132
7133 /* Move min of absolute values to int11. */
7134 if (absu_hwi (int01) < absu_hwi (int11))
7135 {
7136 tmp = int01, int01 = int11, int11 = tmp;
7137 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7138 maybe_same = arg01;
7139 swap = true;
7140 }
7141 else
7142 maybe_same = arg11;
7143
7144 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7145 /* The remainder should not be a constant, otherwise we
7146 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7147 increase the number of multiplications necessary. */
7148 && TREE_CODE (arg10) != INTEGER_CST)
7149 {
7150 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7151 build_int_cst (TREE_TYPE (arg00),
7152 int01 / int11));
7153 alt1 = arg10;
7154 same = maybe_same;
7155 if (swap)
7156 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7157 }
7158 }
7159
7160 if (same)
7161 return fold_build2_loc (loc, MULT_EXPR, type,
7162 fold_build2_loc (loc, code, type,
7163 fold_convert_loc (loc, type, alt0),
7164 fold_convert_loc (loc, type, alt1)),
7165 fold_convert_loc (loc, type, same));
7166
7167 return NULL_TREE;
7168 }
7169
7170 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7171 specified by EXPR into the buffer PTR of length LEN bytes.
7172 Return the number of bytes placed in the buffer, or zero
7173 upon failure. */
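
/* A hedged sketch of the layout below (assuming 8-bit bytes and a
   4-byte integer no wider than a word): encoding the INTEGER_CST
   0x04030201 stores {0x01, 0x02, 0x03, 0x04} into PTR on a
   little-endian target, {0x04, 0x03, 0x02, 0x01} on a big-endian
   one, and returns 4. */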
7174
7175 static int
7176 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7177 {
7178 tree type = TREE_TYPE (expr);
7179 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7180 int byte, offset, word, words;
7181 unsigned char value;
7182
7183 if (total_bytes > len)
7184 return 0;
7185 words = total_bytes / UNITS_PER_WORD;
7186
7187 for (byte = 0; byte < total_bytes; byte++)
7188 {
7189 int bitpos = byte * BITS_PER_UNIT;
7190 if (bitpos < HOST_BITS_PER_WIDE_INT)
7191 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7192 else
7193 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7194 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7195
7196 if (total_bytes > UNITS_PER_WORD)
7197 {
7198 word = byte / UNITS_PER_WORD;
7199 if (WORDS_BIG_ENDIAN)
7200 word = (words - 1) - word;
7201 offset = word * UNITS_PER_WORD;
7202 if (BYTES_BIG_ENDIAN)
7203 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7204 else
7205 offset += byte % UNITS_PER_WORD;
7206 }
7207 else
7208 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7209 ptr[offset] = value;
7210 }
7211 return total_bytes;
7212 }
7213
7214
7215 /* Subroutine of native_encode_expr. Encode the REAL_CST
7216 specified by EXPR into the buffer PTR of length LEN bytes.
7217 Return the number of bytes placed in the buffer, or zero
7218 upon failure. */
7219
7220 static int
7221 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7222 {
7223 tree type = TREE_TYPE (expr);
7224 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7225 int byte, offset, word, words, bitpos;
7226 unsigned char value;
7227
7228 /* There are always 32 bits in each long, no matter the size of
7229 the host's long. We handle floating point representations with
7230 up to 192 bits. */
7231 long tmp[6];
7232
7233 if (total_bytes > len)
7234 return 0;
7235 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7236
7237 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7238
7239 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7240 bitpos += BITS_PER_UNIT)
7241 {
7242 byte = (bitpos / BITS_PER_UNIT) & 3;
7243 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7244
7245 if (UNITS_PER_WORD < 4)
7246 {
7247 word = byte / UNITS_PER_WORD;
7248 if (WORDS_BIG_ENDIAN)
7249 word = (words - 1) - word;
7250 offset = word * UNITS_PER_WORD;
7251 if (BYTES_BIG_ENDIAN)
7252 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7253 else
7254 offset += byte % UNITS_PER_WORD;
7255 }
7256 else
7257 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7258 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7259 }
7260 return total_bytes;
7261 }
7262
7263 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7264 specified by EXPR into the buffer PTR of length LEN bytes.
7265 Return the number of bytes placed in the buffer, or zero
7266 upon failure. */
7267
7268 static int
7269 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7270 {
7271 int rsize, isize;
7272 tree part;
7273
7274 part = TREE_REALPART (expr);
7275 rsize = native_encode_expr (part, ptr, len);
7276 if (rsize == 0)
7277 return 0;
7278 part = TREE_IMAGPART (expr);
7279 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7280 if (isize != rsize)
7281 return 0;
7282 return rsize + isize;
7283 }
7284
7285
7286 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7287 specified by EXPR into the buffer PTR of length LEN bytes.
7288 Return the number of bytes placed in the buffer, or zero
7289 upon failure. */
7290
7291 static int
7292 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7293 {
7294 unsigned i, count;
7295 int size, offset;
7296 tree itype, elem;
7297
7298 offset = 0;
7299 count = VECTOR_CST_NELTS (expr);
7300 itype = TREE_TYPE (TREE_TYPE (expr));
7301 size = GET_MODE_SIZE (TYPE_MODE (itype));
7302 for (i = 0; i < count; i++)
7303 {
7304 elem = VECTOR_CST_ELT (expr, i);
7305 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7306 return 0;
7307 offset += size;
7308 }
7309 return offset;
7310 }
7311
7312
7313 /* Subroutine of native_encode_expr. Encode the STRING_CST
7314 specified by EXPR into the buffer PTR of length LEN bytes.
7315 Return the number of bytes placed in the buffer, or zero
7316 upon failure. */
7317
7318 static int
7319 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7320 {
7321 tree type = TREE_TYPE (expr);
7322 HOST_WIDE_INT total_bytes;
7323
7324 if (TREE_CODE (type) != ARRAY_TYPE
7325 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7326 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7327 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7328 return 0;
7329 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7330 if (total_bytes > len)
7331 return 0;
7332 if (TREE_STRING_LENGTH (expr) < total_bytes)
7333 {
7334 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7335 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7336 total_bytes - TREE_STRING_LENGTH (expr));
7337 }
7338 else
7339 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7340 return total_bytes;
7341 }
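
/* A hedged example: a STRING_CST "hi" (TREE_STRING_LENGTH 3, counting
   the terminating nul) of type char[8] is encoded as the 3 bytes
   {'h', 'i', 0} followed by 5 zero bytes, and the function returns
   8. */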
7342
7343
7344 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7345 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7346 buffer PTR of length LEN bytes. Return the number of bytes
7347 placed in the buffer, or zero upon failure. */
7348
7349 int
7350 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7351 {
7352 switch (TREE_CODE (expr))
7353 {
7354 case INTEGER_CST:
7355 return native_encode_int (expr, ptr, len);
7356
7357 case REAL_CST:
7358 return native_encode_real (expr, ptr, len);
7359
7360 case COMPLEX_CST:
7361 return native_encode_complex (expr, ptr, len);
7362
7363 case VECTOR_CST:
7364 return native_encode_vector (expr, ptr, len);
7365
7366 case STRING_CST:
7367 return native_encode_string (expr, ptr, len);
7368
7369 default:
7370 return 0;
7371 }
7372 }
7373
7374
7375 /* Subroutine of native_interpret_expr. Interpret the contents of
7376 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7377 If the buffer cannot be interpreted, return NULL_TREE. */
7378
7379 static tree
7380 native_interpret_int (tree type, const unsigned char *ptr, int len)
7381 {
7382 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7383 int byte, offset, word, words;
7384 unsigned char value;
7385 double_int result;
7386
7387 if (total_bytes > len)
7388 return NULL_TREE;
7389 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7390 return NULL_TREE;
7391
7392 result = double_int_zero;
7393 words = total_bytes / UNITS_PER_WORD;
7394
7395 for (byte = 0; byte < total_bytes; byte++)
7396 {
7397 int bitpos = byte * BITS_PER_UNIT;
7398 if (total_bytes > UNITS_PER_WORD)
7399 {
7400 word = byte / UNITS_PER_WORD;
7401 if (WORDS_BIG_ENDIAN)
7402 word = (words - 1) - word;
7403 offset = word * UNITS_PER_WORD;
7404 if (BYTES_BIG_ENDIAN)
7405 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7406 else
7407 offset += byte % UNITS_PER_WORD;
7408 }
7409 else
7410 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7411 value = ptr[offset];
7412
7413 if (bitpos < HOST_BITS_PER_WIDE_INT)
7414 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7415 else
7416 result.high |= (unsigned HOST_WIDE_INT) value
7417 << (bitpos - HOST_BITS_PER_WIDE_INT);
7418 }
7419
7420 return double_int_to_tree (type, result);
7421 }
7422
7423
7424 /* Subroutine of native_interpret_expr. Interpret the contents of
7425 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7426 If the buffer cannot be interpreted, return NULL_TREE. */
7427
7428 static tree
7429 native_interpret_real (tree type, const unsigned char *ptr, int len)
7430 {
7431 enum machine_mode mode = TYPE_MODE (type);
7432 int total_bytes = GET_MODE_SIZE (mode);
7433 int byte, offset, word, words, bitpos;
7434 unsigned char value;
7435 /* There are always 32 bits in each long, no matter the size of
7436 the host's long. We handle floating point representations with
7437 up to 192 bits. */
7438 REAL_VALUE_TYPE r;
7439 long tmp[6];
7440
7441 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7442 if (total_bytes > len || total_bytes > 24)
7443 return NULL_TREE;
7444 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7445
7446 memset (tmp, 0, sizeof (tmp));
7447 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7448 bitpos += BITS_PER_UNIT)
7449 {
7450 byte = (bitpos / BITS_PER_UNIT) & 3;
7451 if (UNITS_PER_WORD < 4)
7452 {
7453 word = byte / UNITS_PER_WORD;
7454 if (WORDS_BIG_ENDIAN)
7455 word = (words - 1) - word;
7456 offset = word * UNITS_PER_WORD;
7457 if (BYTES_BIG_ENDIAN)
7458 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7459 else
7460 offset += byte % UNITS_PER_WORD;
7461 }
7462 else
7463 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7464 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7465
7466 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7467 }
7468
7469 real_from_target (&r, tmp, mode);
7470 return build_real (type, r);
7471 }
7472
7473
7474 /* Subroutine of native_interpret_expr. Interpret the contents of
7475 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7476 If the buffer cannot be interpreted, return NULL_TREE. */
7477
7478 static tree
7479 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7480 {
7481 tree etype, rpart, ipart;
7482 int size;
7483
7484 etype = TREE_TYPE (type);
7485 size = GET_MODE_SIZE (TYPE_MODE (etype));
7486 if (size * 2 > len)
7487 return NULL_TREE;
7488 rpart = native_interpret_expr (etype, ptr, size);
7489 if (!rpart)
7490 return NULL_TREE;
7491 ipart = native_interpret_expr (etype, ptr+size, size);
7492 if (!ipart)
7493 return NULL_TREE;
7494 return build_complex (type, rpart, ipart);
7495 }
7496
7497
7498 /* Subroutine of native_interpret_expr. Interpret the contents of
7499 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7500 If the buffer cannot be interpreted, return NULL_TREE. */
7501
7502 static tree
7503 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7504 {
7505 tree etype, elem;
7506 int i, size, count;
7507 tree *elements;
7508
7509 etype = TREE_TYPE (type);
7510 size = GET_MODE_SIZE (TYPE_MODE (etype));
7511 count = TYPE_VECTOR_SUBPARTS (type);
7512 if (size * count > len)
7513 return NULL_TREE;
7514
7515 elements = XALLOCAVEC (tree, count);
7516 for (i = count - 1; i >= 0; i--)
7517 {
7518 elem = native_interpret_expr (etype, ptr+(i*size), size);
7519 if (!elem)
7520 return NULL_TREE;
7521 elements[i] = elem;
7522 }
7523 return build_vector (type, elements);
7524 }
7525
7526
7527 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7528 the buffer PTR of length LEN as a constant of type TYPE. For
7529 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7530 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7531 return NULL_TREE. */
7532
7533 tree
7534 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7535 {
7536 switch (TREE_CODE (type))
7537 {
7538 case INTEGER_TYPE:
7539 case ENUMERAL_TYPE:
7540 case BOOLEAN_TYPE:
7541 case POINTER_TYPE:
7542 case REFERENCE_TYPE:
7543 return native_interpret_int (type, ptr, len);
7544
7545 case REAL_TYPE:
7546 return native_interpret_real (type, ptr, len);
7547
7548 case COMPLEX_TYPE:
7549 return native_interpret_complex (type, ptr, len);
7550
7551 case VECTOR_TYPE:
7552 return native_interpret_vector (type, ptr, len);
7553
7554 default:
7555 return NULL_TREE;
7556 }
7557 }
7558
7559 /* Returns true if we can interpret the contents of a native encoding
7560 as TYPE. */
7561
7562 static bool
7563 can_native_interpret_type_p (tree type)
7564 {
7565 switch (TREE_CODE (type))
7566 {
7567 case INTEGER_TYPE:
7568 case ENUMERAL_TYPE:
7569 case BOOLEAN_TYPE:
7570 case POINTER_TYPE:
7571 case REFERENCE_TYPE:
7572 case REAL_TYPE:
7573 case COMPLEX_TYPE:
7574 case VECTOR_TYPE:
7575 return true;
7576 default:
7577 return false;
7578 }
7579 }
7580
7581 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7582 TYPE at compile-time. If we're unable to perform the conversion
7583 return NULL_TREE. */
7584
7585 static tree
7586 fold_view_convert_expr (tree type, tree expr)
7587 {
7588 /* We support up to 512-bit values (for V8DFmode). */
7589 unsigned char buffer[64];
7590 int len;
7591
7592 /* Check that the host and target are sane. */
7593 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7594 return NULL_TREE;
7595
7596 len = native_encode_expr (expr, buffer, sizeof (buffer));
7597 if (len == 0)
7598 return NULL_TREE;
7599
7600 return native_interpret_expr (type, buffer, len);
7601 }
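
/* A hedged usage sketch: assuming a 32-bit IEEE single-precision
   float, folding VIEW_CONVERT_EXPR<int>(1.0f) encodes the float into
   target byte order, reinterprets the same bytes as an integer, and
   yields the INTEGER_CST 0x3f800000 (1065353216). */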
7602
7603 /* Build an expression for the address of T. Folds away INDIRECT_REF
7604 to avoid confusing the gimplify process. */
7605
7606 tree
7607 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7608 {
7609 /* The size of the object is not relevant when talking about its address. */
7610 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7611 t = TREE_OPERAND (t, 0);
7612
7613 if (TREE_CODE (t) == INDIRECT_REF)
7614 {
7615 t = TREE_OPERAND (t, 0);
7616
7617 if (TREE_TYPE (t) != ptrtype)
7618 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7619 }
7620 else if (TREE_CODE (t) == MEM_REF
7621 && integer_zerop (TREE_OPERAND (t, 1)))
7622 return TREE_OPERAND (t, 0);
7623 else if (TREE_CODE (t) == MEM_REF
7624 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7625 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7626 TREE_OPERAND (t, 0),
7627 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7628 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7629 {
7630 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7631
7632 if (TREE_TYPE (t) != ptrtype)
7633 t = fold_convert_loc (loc, ptrtype, t);
7634 }
7635 else
7636 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7637
7638 return t;
7639 }
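
/* A hedged illustration (P is a hypothetical "int *"): taking the
   address of the INDIRECT_REF *P folds straight back to P, and a
   MEM_REF of P with offset zero folds the same way, so no ADDR_EXPR
   is built in either case. */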
7640
7641 /* Build an expression for the address of T. */
7642
7643 tree
7644 build_fold_addr_expr_loc (location_t loc, tree t)
7645 {
7646 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7647
7648 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7649 }
7650
7651 static bool vec_cst_ctor_to_array (tree, tree *);
7652
7653 /* Fold a unary expression of code CODE and type TYPE with operand
7654 OP0. Return the folded expression if folding is successful.
7655 Otherwise, return NULL_TREE. */
7656
7657 tree
7658 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7659 {
7660 tree tem;
7661 tree arg0;
7662 enum tree_code_class kind = TREE_CODE_CLASS (code);
7663
7664 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7665 && TREE_CODE_LENGTH (code) == 1);
7666
7667 arg0 = op0;
7668 if (arg0)
7669 {
7670 if (CONVERT_EXPR_CODE_P (code)
7671 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7672 {
7673 /* Don't use STRIP_NOPS, because signedness of argument type
7674 matters. */
7675 STRIP_SIGN_NOPS (arg0);
7676 }
7677 else
7678 {
7679 /* Strip any conversions that don't change the mode. This
7680 is safe for every expression, except for a comparison
7681 expression because its signedness is derived from its
7682 operands.
7683
7684 Note that this is done as an internal manipulation within
7685 the constant folder, in order to find the simplest
7686 representation of the arguments so that their form can be
7687 studied. In any case, the appropriate type conversions
7688 should be put back in the tree that will get out of the
7689 constant folder. */
7690 STRIP_NOPS (arg0);
7691 }
7692 }
7693
7694 if (TREE_CODE_CLASS (code) == tcc_unary)
7695 {
7696 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7697 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7698 fold_build1_loc (loc, code, type,
7699 fold_convert_loc (loc, TREE_TYPE (op0),
7700 TREE_OPERAND (arg0, 1))));
7701 else if (TREE_CODE (arg0) == COND_EXPR)
7702 {
7703 tree arg01 = TREE_OPERAND (arg0, 1);
7704 tree arg02 = TREE_OPERAND (arg0, 2);
7705 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7706 arg01 = fold_build1_loc (loc, code, type,
7707 fold_convert_loc (loc,
7708 TREE_TYPE (op0), arg01));
7709 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7710 arg02 = fold_build1_loc (loc, code, type,
7711 fold_convert_loc (loc,
7712 TREE_TYPE (op0), arg02));
7713 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7714 arg01, arg02);
7715
7716 /* If this was a conversion, and all we did was to move it
7717 inside the COND_EXPR, bring it back out. But leave it if
7718 it is a conversion from integer to integer and the
7719 result precision is no wider than a word since such a
7720 conversion is cheap and may be optimized away by combine,
7721 while it couldn't if it were outside the COND_EXPR. Then return
7722 so we don't get into an infinite recursion loop taking the
7723 conversion out and then back in. */
7724
7725 if ((CONVERT_EXPR_CODE_P (code)
7726 || code == NON_LVALUE_EXPR)
7727 && TREE_CODE (tem) == COND_EXPR
7728 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7729 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7730 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7731 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7732 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7733 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7734 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7735 && (INTEGRAL_TYPE_P
7736 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7737 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7738 || flag_syntax_only))
7739 tem = build1_loc (loc, code, type,
7740 build3 (COND_EXPR,
7741 TREE_TYPE (TREE_OPERAND
7742 (TREE_OPERAND (tem, 1), 0)),
7743 TREE_OPERAND (tem, 0),
7744 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7745 TREE_OPERAND (TREE_OPERAND (tem, 2),
7746 0)));
7747 return tem;
7748 }
7749 }
7750
7751 switch (code)
7752 {
7753 case PAREN_EXPR:
7754 /* Re-association barriers around constants and other re-association
7755 barriers can be removed. */
7756 if (CONSTANT_CLASS_P (op0)
7757 || TREE_CODE (op0) == PAREN_EXPR)
7758 return fold_convert_loc (loc, type, op0);
7759 return NULL_TREE;
7760
7761 CASE_CONVERT:
7762 case FLOAT_EXPR:
7763 case FIX_TRUNC_EXPR:
7764 if (TREE_TYPE (op0) == type)
7765 return op0;
7766
7767 if (COMPARISON_CLASS_P (op0))
7768 {
7769 /* If we have (type) (a CMP b) and type is an integral type, return
7770 new expression involving the new type. Canonicalize
7771 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7772 non-integral type.
7773 Do not fold the result, as that would not simplify further;
7774 folding it again would result in infinite recursion. */
7775 if (TREE_CODE (type) == BOOLEAN_TYPE)
7776 return build2_loc (loc, TREE_CODE (op0), type,
7777 TREE_OPERAND (op0, 0),
7778 TREE_OPERAND (op0, 1));
7779 else if (!INTEGRAL_TYPE_P (type))
7780 return build3_loc (loc, COND_EXPR, type, op0,
7781 constant_boolean_node (true, type),
7782 constant_boolean_node (false, type));
7783 }
7784
7785 /* Handle cases of two conversions in a row. */
7786 if (CONVERT_EXPR_P (op0))
7787 {
7788 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7789 tree inter_type = TREE_TYPE (op0);
7790 int inside_int = INTEGRAL_TYPE_P (inside_type);
7791 int inside_ptr = POINTER_TYPE_P (inside_type);
7792 int inside_float = FLOAT_TYPE_P (inside_type);
7793 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7794 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7795 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7796 int inter_int = INTEGRAL_TYPE_P (inter_type);
7797 int inter_ptr = POINTER_TYPE_P (inter_type);
7798 int inter_float = FLOAT_TYPE_P (inter_type);
7799 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7800 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7801 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7802 int final_int = INTEGRAL_TYPE_P (type);
7803 int final_ptr = POINTER_TYPE_P (type);
7804 int final_float = FLOAT_TYPE_P (type);
7805 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7806 unsigned int final_prec = TYPE_PRECISION (type);
7807 int final_unsignedp = TYPE_UNSIGNED (type);
7808
7809 /* In addition to the cases of two conversions in a row
7810 handled below, if we are converting something to its own
7811 type via an object of identical or wider precision, neither
7812 conversion is needed. */
7813 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7814 && (((inter_int || inter_ptr) && final_int)
7815 || (inter_float && final_float))
7816 && inter_prec >= final_prec)
7817 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7818
7819 /* Likewise, if the intermediate and initial types are either both
7820 float or both integer, we don't need the middle conversion if the
7821 former is wider than the latter and doesn't change the signedness
7822 (for integers). Avoid this if the final type is a pointer since
7823 then we sometimes need the middle conversion. Likewise if the
7824 final type has a precision not equal to the size of its mode. */
7825 if (((inter_int && inside_int)
7826 || (inter_float && inside_float)
7827 || (inter_vec && inside_vec))
7828 && inter_prec >= inside_prec
7829 && (inter_float || inter_vec
7830 || inter_unsignedp == inside_unsignedp)
7831 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7832 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7833 && ! final_ptr
7834 && (! final_vec || inter_prec == inside_prec))
7835 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7836
7837 /* If we have a sign-extension of a zero-extended value, we can
7838 replace that by a single zero-extension. Likewise if the
7839 final conversion does not change precision we can drop the
7840 intermediate conversion. */
7841 if (inside_int && inter_int && final_int
7842 && ((inside_prec < inter_prec && inter_prec < final_prec
7843 && inside_unsignedp && !inter_unsignedp)
7844 || final_prec == inter_prec))
7845 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7846
7847 /* Two conversions in a row are not needed unless:
7848 - some conversion is floating-point (overstrict for now), or
7849 - some conversion is a vector (overstrict for now), or
7850 - the intermediate type is narrower than both initial and
7851 final, or
7852 - the intermediate type and innermost type differ in signedness,
7853 and the outermost type is wider than the intermediate, or
7854 - the initial type is a pointer type and the precisions of the
7855 intermediate and final types differ, or
7856 - the final type is a pointer type and the precisions of the
7857 initial and intermediate types differ. */
7858 if (! inside_float && ! inter_float && ! final_float
7859 && ! inside_vec && ! inter_vec && ! final_vec
7860 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7861 && ! (inside_int && inter_int
7862 && inter_unsignedp != inside_unsignedp
7863 && inter_prec < final_prec)
7864 && ((inter_unsignedp && inter_prec > inside_prec)
7865 == (final_unsignedp && final_prec > inter_prec))
7866 && ! (inside_ptr && inter_prec != final_prec)
7867 && ! (final_ptr && inside_prec != inter_prec)
7868 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7869 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7870 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7871 }
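
/* A hedged example of the rules above (I is a hypothetical int
   variable): in (int) (long) I the widening to long is useless
   because int round-trips unchanged through the wider type, so both
   conversions are dropped and the result is I itself. */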
7872
7873 /* Handle (T *)&A.B.C for A being of type T and B and C
7874 living at offset zero. This occurs frequently in
7875 C++ upcasting and then accessing the base. */
7876 if (TREE_CODE (op0) == ADDR_EXPR
7877 && POINTER_TYPE_P (type)
7878 && handled_component_p (TREE_OPERAND (op0, 0)))
7879 {
7880 HOST_WIDE_INT bitsize, bitpos;
7881 tree offset;
7882 enum machine_mode mode;
7883 int unsignedp, volatilep;
7884 tree base = TREE_OPERAND (op0, 0);
7885 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7886 &mode, &unsignedp, &volatilep, false);
7887 /* If the reference was to a (constant) zero offset, we can use
7888 the address of the base if it has the same base type
7889 as the result type and the pointer type is unqualified. */
7890 if (! offset && bitpos == 0
7891 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7892 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7893 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7894 return fold_convert_loc (loc, type,
7895 build_fold_addr_expr_loc (loc, base));
7896 }
7897
7898 if (TREE_CODE (op0) == MODIFY_EXPR
7899 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7900 /* Detect assigning a bitfield. */
7901 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7902 && DECL_BIT_FIELD
7903 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7904 {
7905 /* Don't leave an assignment inside a conversion
7906 unless assigning a bitfield. */
7907 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7908 /* First do the assignment, then return converted constant. */
7909 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7910 TREE_NO_WARNING (tem) = 1;
7911 TREE_USED (tem) = 1;
7912 return tem;
7913 }
7914
7915 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7916 constant (if x has signed type, the sign bit cannot be set
7917 in c). This folds extension into the BIT_AND_EXPR.
7918 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7919 very likely don't have maximal range for their precision and this
7920 transformation effectively doesn't preserve non-maximal ranges. */
7921 if (TREE_CODE (type) == INTEGER_TYPE
7922 && TREE_CODE (op0) == BIT_AND_EXPR
7923 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7924 {
7925 tree and_expr = op0;
7926 tree and0 = TREE_OPERAND (and_expr, 0);
7927 tree and1 = TREE_OPERAND (and_expr, 1);
7928 int change = 0;
7929
7930 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7931 || (TYPE_PRECISION (type)
7932 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7933 change = 1;
7934 else if (TYPE_PRECISION (TREE_TYPE (and1))
7935 <= HOST_BITS_PER_WIDE_INT
7936 && host_integerp (and1, 1))
7937 {
7938 unsigned HOST_WIDE_INT cst;
7939
7940 cst = tree_low_cst (and1, 1);
7941 cst &= (HOST_WIDE_INT) -1
7942 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7943 change = (cst == 0);
7944 #ifdef LOAD_EXTEND_OP
7945 if (change
7946 && !flag_syntax_only
7947 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7948 == ZERO_EXTEND))
7949 {
7950 tree uns = unsigned_type_for (TREE_TYPE (and0));
7951 and0 = fold_convert_loc (loc, uns, and0);
7952 and1 = fold_convert_loc (loc, uns, and1);
7953 }
7954 #endif
7955 }
7956 if (change)
7957 {
7958 tem = force_fit_type_double (type, tree_to_double_int (and1),
7959 0, TREE_OVERFLOW (and1));
7960 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7961 fold_convert_loc (loc, type, and0), tem);
7962 }
7963 }
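
/* A hedged example (C is a hypothetical unsigned char): the cast in
   (int) (C & 0x7f) distributes over the mask, giving (int) C & 0x7f,
   thereby folding the zero-extension into the BIT_AND_EXPR. */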
7964
7965 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7966 when one of the new casts will fold away. Conservatively we assume
7967 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7968 if (POINTER_TYPE_P (type)
7969 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7970 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7971 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7972 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7973 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7974 {
7975 tree arg00 = TREE_OPERAND (arg0, 0);
7976 tree arg01 = TREE_OPERAND (arg0, 1);
7977
7978 return fold_build_pointer_plus_loc
7979 (loc, fold_convert_loc (loc, type, arg00), arg01);
7980 }
7981
7982 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7983 of the same precision, and X is an integer type not narrower than
7984 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7985 if (INTEGRAL_TYPE_P (type)
7986 && TREE_CODE (op0) == BIT_NOT_EXPR
7987 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7988 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7989 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7990 {
7991 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7992 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7993 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7994 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7995 fold_convert_loc (loc, type, tem));
7996 }
7997
7998 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7999 type of X and Y (integer types only). */
8000 if (INTEGRAL_TYPE_P (type)
8001 && TREE_CODE (op0) == MULT_EXPR
8002 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8003 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8004 {
8005 /* Be careful not to introduce new overflows. */
8006 tree mult_type;
8007 if (TYPE_OVERFLOW_WRAPS (type))
8008 mult_type = type;
8009 else
8010 mult_type = unsigned_type_for (type);
8011
8012 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8013 {
8014 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8015 fold_convert_loc (loc, mult_type,
8016 TREE_OPERAND (op0, 0)),
8017 fold_convert_loc (loc, mult_type,
8018 TREE_OPERAND (op0, 1)));
8019 return fold_convert_loc (loc, type, tem);
8020 }
8021 }
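
/* A hedged example (I and J are hypothetical ints, short is 16
   bits): (short) (I * J) becomes
   (short) ((unsigned short) I * (unsigned short) J), doing the
   multiplication in a wrapping type so the narrowing cannot
   introduce a new overflow. */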
8022
8023 tem = fold_convert_const (code, type, op0);
8024 return tem ? tem : NULL_TREE;
8025
8026 case ADDR_SPACE_CONVERT_EXPR:
8027 if (integer_zerop (arg0))
8028 return fold_convert_const (code, type, arg0);
8029 return NULL_TREE;
8030
8031 case FIXED_CONVERT_EXPR:
8032 tem = fold_convert_const (code, type, arg0);
8033 return tem ? tem : NULL_TREE;
8034
8035 case VIEW_CONVERT_EXPR:
8036 if (TREE_TYPE (op0) == type)
8037 return op0;
8038 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8039 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8040 type, TREE_OPERAND (op0, 0));
8041 if (TREE_CODE (op0) == MEM_REF)
8042 return fold_build2_loc (loc, MEM_REF, type,
8043 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8044
8045 /* For integral conversions with the same precision or pointer
8046 conversions use a NOP_EXPR instead. */
8047 if ((INTEGRAL_TYPE_P (type)
8048 || POINTER_TYPE_P (type))
8049 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8050 || POINTER_TYPE_P (TREE_TYPE (op0)))
8051 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8052 return fold_convert_loc (loc, type, op0);
8053
8054 /* Strip inner integral conversions that do not change the precision. */
8055 if (CONVERT_EXPR_P (op0)
8056 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8057 || POINTER_TYPE_P (TREE_TYPE (op0)))
8058 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8059 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8060 && (TYPE_PRECISION (TREE_TYPE (op0))
8061 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8062 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8063 type, TREE_OPERAND (op0, 0));
8064
8065 return fold_view_convert_expr (type, op0);
8066
8067 case NEGATE_EXPR:
8068 tem = fold_negate_expr (loc, arg0);
8069 if (tem)
8070 return fold_convert_loc (loc, type, tem);
8071 return NULL_TREE;
8072
8073 case ABS_EXPR:
8074 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8075 return fold_abs_const (arg0, type);
8076 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8077 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8078 /* Convert fabs((double)float) into (double)fabsf(float). */
8079 else if (TREE_CODE (arg0) == NOP_EXPR
8080 && TREE_CODE (type) == REAL_TYPE)
8081 {
8082 tree targ0 = strip_float_extensions (arg0);
8083 if (targ0 != arg0)
8084 return fold_convert_loc (loc, type,
8085 fold_build1_loc (loc, ABS_EXPR,
8086 TREE_TYPE (targ0),
8087 targ0));
8088 }
8089 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8090 else if (TREE_CODE (arg0) == ABS_EXPR)
8091 return arg0;
8092 else if (tree_expr_nonnegative_p (arg0))
8093 return arg0;
8094
8095 /* Strip sign ops from argument. */
8096 if (TREE_CODE (type) == REAL_TYPE)
8097 {
8098 tem = fold_strip_sign_ops (arg0);
8099 if (tem)
8100 return fold_build1_loc (loc, ABS_EXPR, type,
8101 fold_convert_loc (loc, type, tem));
8102 }
8103 return NULL_TREE;
8104
8105 case CONJ_EXPR:
8106 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8107 return fold_convert_loc (loc, type, arg0);
8108 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8109 {
8110 tree itype = TREE_TYPE (type);
8111 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8112 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8113 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8114 negate_expr (ipart));
8115 }
8116 if (TREE_CODE (arg0) == COMPLEX_CST)
8117 {
8118 tree itype = TREE_TYPE (type);
8119 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8120 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8121 return build_complex (type, rpart, negate_expr (ipart));
8122 }
8123 if (TREE_CODE (arg0) == CONJ_EXPR)
8124 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8125 return NULL_TREE;
8126
8127 case BIT_NOT_EXPR:
8128 if (TREE_CODE (arg0) == INTEGER_CST)
8129 return fold_not_const (arg0, type);
8130 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8131 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8132 /* Convert ~ (-A) to A - 1. */
8133 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8134 return fold_build2_loc (loc, MINUS_EXPR, type,
8135 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8136 build_int_cst (type, 1));
8137 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8138 else if (INTEGRAL_TYPE_P (type)
8139 && ((TREE_CODE (arg0) == MINUS_EXPR
8140 && integer_onep (TREE_OPERAND (arg0, 1)))
8141 || (TREE_CODE (arg0) == PLUS_EXPR
8142 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8143 return fold_build1_loc (loc, NEGATE_EXPR, type,
8144 fold_convert_loc (loc, type,
8145 TREE_OPERAND (arg0, 0)));
8146 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8147 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8148 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8149 fold_convert_loc (loc, type,
8150 TREE_OPERAND (arg0, 0)))))
8151 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8152 fold_convert_loc (loc, type,
8153 TREE_OPERAND (arg0, 1)));
8154 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8155 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8156 fold_convert_loc (loc, type,
8157 TREE_OPERAND (arg0, 1)))))
8158 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8159 fold_convert_loc (loc, type,
8160 TREE_OPERAND (arg0, 0)), tem);
8161 /* Perform BIT_NOT_EXPR on each element individually. */
8162 else if (TREE_CODE (arg0) == VECTOR_CST)
8163 {
8164 tree *elements;
8165 tree elem;
8166 unsigned count = VECTOR_CST_NELTS (arg0), i;
8167
8168 elements = XALLOCAVEC (tree, count);
8169 for (i = 0; i < count; i++)
8170 {
8171 elem = VECTOR_CST_ELT (arg0, i);
8172 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8173 if (elem == NULL_TREE)
8174 break;
8175 elements[i] = elem;
8176 }
8177 if (i == count)
8178 return build_vector (type, elements);
8179 }
8180
8181 return NULL_TREE;
8182
8183 case TRUTH_NOT_EXPR:
8184 /* The argument to invert_truthvalue must have Boolean type. */
8185 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8186 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8187
8188 /* Note that the operand of this must be an int
8189 and its values must be 0 or 1.
8190 ("true" is a fixed value, perhaps depending on the language,
8191 but we don't yet handle values other than 1 correctly.) */
8192 tem = fold_truth_not_expr (loc, arg0);
8193 if (!tem)
8194 return NULL_TREE;
8195 return fold_convert_loc (loc, type, tem);
8196
8197 case REALPART_EXPR:
8198 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8199 return fold_convert_loc (loc, type, arg0);
8200 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8201 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8202 TREE_OPERAND (arg0, 1));
8203 if (TREE_CODE (arg0) == COMPLEX_CST)
8204 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8205 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8206 {
8207 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8208 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8209 fold_build1_loc (loc, REALPART_EXPR, itype,
8210 TREE_OPERAND (arg0, 0)),
8211 fold_build1_loc (loc, REALPART_EXPR, itype,
8212 TREE_OPERAND (arg0, 1)));
8213 return fold_convert_loc (loc, type, tem);
8214 }
8215 if (TREE_CODE (arg0) == CONJ_EXPR)
8216 {
8217 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8218 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8219 TREE_OPERAND (arg0, 0));
8220 return fold_convert_loc (loc, type, tem);
8221 }
8222 if (TREE_CODE (arg0) == CALL_EXPR)
8223 {
8224 tree fn = get_callee_fndecl (arg0);
8225 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8226 switch (DECL_FUNCTION_CODE (fn))
8227 {
8228 CASE_FLT_FN (BUILT_IN_CEXPI):
8229 fn = mathfn_built_in (type, BUILT_IN_COS);
8230 if (fn)
8231 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8232 break;
8233
8234 default:
8235 break;
8236 }
8237 }
8238 return NULL_TREE;
8239
8240 case IMAGPART_EXPR:
8241 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8242 return build_zero_cst (type);
8243 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8244 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8245 TREE_OPERAND (arg0, 0));
8246 if (TREE_CODE (arg0) == COMPLEX_CST)
8247 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8248 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8249 {
8250 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8251 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8252 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8253 TREE_OPERAND (arg0, 0)),
8254 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8255 TREE_OPERAND (arg0, 1)));
8256 return fold_convert_loc (loc, type, tem);
8257 }
8258 if (TREE_CODE (arg0) == CONJ_EXPR)
8259 {
8260 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8261 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8262 return fold_convert_loc (loc, type, negate_expr (tem));
8263 }
8264 if (TREE_CODE (arg0) == CALL_EXPR)
8265 {
8266 tree fn = get_callee_fndecl (arg0);
8267 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8268 switch (DECL_FUNCTION_CODE (fn))
8269 {
8270 CASE_FLT_FN (BUILT_IN_CEXPI):
8271 fn = mathfn_built_in (type, BUILT_IN_SIN);
8272 if (fn)
8273 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8274 break;
8275
8276 default:
8277 break;
8278 }
8279 }
8280 return NULL_TREE;
8281
8282 case INDIRECT_REF:
8283 /* Fold *&X to X if X is an lvalue. */
8284 if (TREE_CODE (op0) == ADDR_EXPR)
8285 {
8286 tree op00 = TREE_OPERAND (op0, 0);
8287 if ((TREE_CODE (op00) == VAR_DECL
8288 || TREE_CODE (op00) == PARM_DECL
8289 || TREE_CODE (op00) == RESULT_DECL)
8290 && !TREE_READONLY (op00))
8291 return op00;
8292 }
8293 return NULL_TREE;
8294
8295 case VEC_UNPACK_LO_EXPR:
8296 case VEC_UNPACK_HI_EXPR:
8297 case VEC_UNPACK_FLOAT_LO_EXPR:
8298 case VEC_UNPACK_FLOAT_HI_EXPR:
8299 {
8300 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8301 tree *elts;
8302 enum tree_code subcode;
8303
8304 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8305 if (TREE_CODE (arg0) != VECTOR_CST)
8306 return NULL_TREE;
8307
8308 elts = XALLOCAVEC (tree, nelts * 2);
8309 if (!vec_cst_ctor_to_array (arg0, elts))
8310 return NULL_TREE;
8311
8312 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8313 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8314 elts += nelts;
8315
8316 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8317 subcode = NOP_EXPR;
8318 else
8319 subcode = FLOAT_EXPR;
8320
8321 for (i = 0; i < nelts; i++)
8322 {
8323 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8324 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8325 return NULL_TREE;
8326 }
8327
8328 return build_vector (type, elts);
8329 }
8330
8331 default:
8332 return NULL_TREE;
8333 } /* switch (code) */
8334 }
8335
8336
8337 /* If the operation was a conversion do _not_ mark a resulting constant
8338 with TREE_OVERFLOW if the original constant was not. These conversions
8339 have implementation defined behavior and retaining the TREE_OVERFLOW
8340 flag here would confuse later passes such as VRP. */
8341 tree
8342 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8343 tree type, tree op0)
8344 {
8345 tree res = fold_unary_loc (loc, code, type, op0);
8346 if (res
8347 && TREE_CODE (res) == INTEGER_CST
8348 && TREE_CODE (op0) == INTEGER_CST
8349 && CONVERT_EXPR_CODE_P (code))
8350 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8351
8352 return res;
8353 }
8354
8355 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8356 operands OP0 and OP1. LOC is the location of the resulting expression.
8357 ARG0 and ARG1 are OP0 and OP1 with conversion NOPs stripped (STRIP_NOPS).
8358 Return the folded expression if folding is successful. Otherwise,
8359 return NULL_TREE. */
8360 static tree
8361 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8362 tree arg0, tree arg1, tree op0, tree op1)
8363 {
8364 tree tem;
8365
8366 /* We only do these simplifications if we are optimizing. */
8367 if (!optimize)
8368 return NULL_TREE;
8369
8370 /* Check for things like (A || B) && (A || C). We can convert this
8371 to A || (B && C). Note that either operator can be any of the four
8372 truth and/or operations and the transformation will still be
8373 valid. Also note that we only care about order for the
8374 ANDIF and ORIF operators. If B contains side effects, this
8375 might change the truth-value of A. */
8376 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8377 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8378 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8379 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8380 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8381 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8382 {
8383 tree a00 = TREE_OPERAND (arg0, 0);
8384 tree a01 = TREE_OPERAND (arg0, 1);
8385 tree a10 = TREE_OPERAND (arg1, 0);
8386 tree a11 = TREE_OPERAND (arg1, 1);
8387 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8388 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8389 && (code == TRUTH_AND_EXPR
8390 || code == TRUTH_OR_EXPR));
8391
8392 if (operand_equal_p (a00, a10, 0))
8393 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8394 fold_build2_loc (loc, code, type, a01, a11));
8395 else if (commutative && operand_equal_p (a00, a11, 0))
8396 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8397 fold_build2_loc (loc, code, type, a01, a10));
8398 else if (commutative && operand_equal_p (a01, a10, 0))
8399 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8400 fold_build2_loc (loc, code, type, a00, a11));
8401
8402 /* This case is tricky because we must either have commutative
8403 operators or else A10 must not have side-effects. */
8404
8405 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8406 && operand_equal_p (a01, a11, 0))
8407 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8408 fold_build2_loc (loc, code, type, a00, a10),
8409 a01);
8410 }
8411
8412 /* See if we can build a range comparison. */
8413 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8414 return tem;
8415
8416 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8417 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8418 {
8419 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8420 if (tem)
8421 return fold_build2_loc (loc, code, type, tem, arg1);
8422 }
8423
8424 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8425 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8426 {
8427 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8428 if (tem)
8429 return fold_build2_loc (loc, code, type, arg0, tem);
8430 }
8431
8432 /* Check for the possibility of merging component references. If our
8433 lhs is another similar operation, try to merge its rhs with our
8434 rhs. Then try to merge our lhs and rhs. */
8435 if (TREE_CODE (arg0) == code
8436 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8437 TREE_OPERAND (arg0, 1), arg1)))
8438 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8439
8440 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8441 return tem;
8442
8443 if ((BRANCH_COST (optimize_function_for_speed_p (cfun),
8444 false) >= 2)
8445 && LOGICAL_OP_NON_SHORT_CIRCUIT
8446 && (code == TRUTH_AND_EXPR
8447 || code == TRUTH_ANDIF_EXPR
8448 || code == TRUTH_OR_EXPR
8449 || code == TRUTH_ORIF_EXPR))
8450 {
8451 enum tree_code ncode, icode;
8452
8453 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8454 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8455 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8456
8457 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8458 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8459 We don't want to pack more than two leaves into a non-IF AND/OR
8460 expression.
8461 If the tree code of the left-hand operand isn't an AND/OR-IF code
8462 and isn't equal to IF-CODE, then we don't want to add the
8463 right-hand operand. If the inner right-hand side of the left-hand
8464 operand has side effects, or isn't simple, then we can't add to
8465 it, as otherwise we might destroy the if-sequence. */
8466 if (TREE_CODE (arg0) == icode
8467 && simple_operand_p_2 (arg1)
8468 /* Needed for sequence points, to handle trapping and
8469 side effects. */
8470 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8471 {
8472 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8473 arg1);
8474 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8475 tem);
8476 }
8477 /* Same as above, but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8478 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8479 else if (TREE_CODE (arg1) == icode
8480 && simple_operand_p_2 (arg0)
8481 /* Needed for sequence points, to handle trapping and
8482 side effects. */
8483 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8484 {
8485 tem = fold_build2_loc (loc, ncode, type,
8486 arg0, TREE_OPERAND (arg1, 0));
8487 return fold_build2_loc (loc, icode, type, tem,
8488 TREE_OPERAND (arg1, 1));
8489 }
8490 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8491 into (A OR B).
8492 For sequence point consistency, we need to check for trapping
8493 and side effects. */
8494 else if (code == icode && simple_operand_p_2 (arg0)
8495 && simple_operand_p_2 (arg1))
8496 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8497 }
8498
8499 return NULL_TREE;
8500 }
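
/* A hedged example of the non-short-circuit packing above (A, B and C
   are hypothetical simple, trap-free comparisons): when branches are
   costly, (A ANDIF B) ANDIF C becomes A ANDIF (B AND C), leaving at
   most one conditional jump to guard the unconditional AND of B and
   C. */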
8501
8502 /* Fold a binary expression of code CODE and type TYPE with operands
8503 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8504 Return the folded expression if folding is successful. Otherwise,
8505 return NULL_TREE. */
8506
8507 static tree
8508 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8509 {
8510 enum tree_code compl_code;
8511
8512 if (code == MIN_EXPR)
8513 compl_code = MAX_EXPR;
8514 else if (code == MAX_EXPR)
8515 compl_code = MIN_EXPR;
8516 else
8517 gcc_unreachable ();
8518
8519 /* MIN (MAX (a, b), b) == b. */
8520 if (TREE_CODE (op0) == compl_code
8521 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8522 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8523
8524 /* MIN (MAX (b, a), b) == b. */
8525 if (TREE_CODE (op0) == compl_code
8526 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8527 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8528 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8529
8530 /* MIN (a, MAX (a, b)) == a. */
8531 if (TREE_CODE (op1) == compl_code
8532 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8533 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8534 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8535
8536 /* MIN (a, MAX (b, a)) == a. */
8537 if (TREE_CODE (op1) == compl_code
8538 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8539 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8540 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8541
8542 return NULL_TREE;
8543 }
8544
8545 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8546 by changing CODE to reduce the magnitude of constants involved in
8547 ARG0 of the comparison.
8548 Returns a canonicalized comparison tree if a simplification was
8549 possible, otherwise returns NULL_TREE.
8550 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8551 valid if signed overflow is undefined. */
8552
8553 static tree
8554 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8555 tree arg0, tree arg1,
8556 bool *strict_overflow_p)
8557 {
8558 enum tree_code code0 = TREE_CODE (arg0);
8559 tree t, cst0 = NULL_TREE;
8560 int sgn0;
8561 bool swap = false;
8562
8563 /* Match A +- CST code arg1 and CST code arg1. We can change the
8564 first form only if overflow is undefined. */
8565 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8566 /* In principle pointers also have undefined overflow behavior,
8567 but that causes problems elsewhere. */
8568 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8569 && (code0 == MINUS_EXPR
8570 || code0 == PLUS_EXPR)
8571 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8572 || code0 == INTEGER_CST))
8573 return NULL_TREE;
8574
8575 /* Identify the constant in arg0 and its sign. */
8576 if (code0 == INTEGER_CST)
8577 cst0 = arg0;
8578 else
8579 cst0 = TREE_OPERAND (arg0, 1);
8580 sgn0 = tree_int_cst_sgn (cst0);
8581
8582 /* Overflowed constants and zero will cause problems. */
8583 if (integer_zerop (cst0)
8584 || TREE_OVERFLOW (cst0))
8585 return NULL_TREE;
8586
8587 /* See if we can reduce the magnitude of the constant in
8588 arg0 by changing the comparison code. */
8589 if (code0 == INTEGER_CST)
8590 {
8591 /* CST <= arg1 -> CST-1 < arg1. */
8592 if (code == LE_EXPR && sgn0 == 1)
8593 code = LT_EXPR;
8594 /* -CST < arg1 -> -CST-1 <= arg1. */
8595 else if (code == LT_EXPR && sgn0 == -1)
8596 code = LE_EXPR;
8597 /* CST > arg1 -> CST-1 >= arg1. */
8598 else if (code == GT_EXPR && sgn0 == 1)
8599 code = GE_EXPR;
8600 /* -CST >= arg1 -> -CST-1 > arg1. */
8601 else if (code == GE_EXPR && sgn0 == -1)
8602 code = GT_EXPR;
8603 else
8604 return NULL_TREE;
8605 /* arg1 code' CST' might be more canonical. */
8606 swap = true;
8607 }
8608 else
8609 {
8610 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8611 if (code == LT_EXPR
8612 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8613 code = LE_EXPR;
8614 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8615 else if (code == GT_EXPR
8616 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8617 code = GE_EXPR;
8618 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8619 else if (code == LE_EXPR
8620 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8621 code = LT_EXPR;
8622 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8623 else if (code == GE_EXPR
8624 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8625 code = GT_EXPR;
8626 else
8627 return NULL_TREE;
8628 *strict_overflow_p = true;
8629 }
8630
8631 /* Now build the constant reduced in magnitude. But not if that
8632 would produce one outside of its type's range. */
8633 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8634 && ((sgn0 == 1
8635 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8636 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8637 || (sgn0 == -1
8638 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8639 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8640 /* We cannot swap the comparison here as that would cause us to
8641 endlessly recurse. */
8642 return NULL_TREE;
8643
8644 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8645 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8646 if (code0 != INTEGER_CST)
8647 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8648 t = fold_convert (TREE_TYPE (arg1), t);
8649
8650 /* If swapping might yield a more canonical form, do so. */
8651 if (swap)
8652 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8653 else
8654 return fold_build2_loc (loc, code, type, t, arg1);
8655 }
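
/* Illustrative sketch, not part of the original source: the effect of
   the canonicalization above in source terms, assuming signed overflow
   is undefined for int.  */
#if 0
int f (int x, int y) { return x + 2 > y; }   /* canonicalized to ...    */
int g (int x, int y) { return x + 1 >= y; }  /* ... this form, and      */
int h (int y)        { return 5 <= y; }      /* this one to 4 < y,      */
                                             /* then swapped to y > 4.  */
#endif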
8656
8657 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8658 overflow further. Try to decrease the magnitude of constants involved
8659 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8660 and put sole constants at the second argument position.
8661 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8662
8663 static tree
8664 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8665 tree arg0, tree arg1)
8666 {
8667 tree t;
8668 bool strict_overflow_p;
8669 const char * const warnmsg = G_("assuming signed overflow does not occur "
8670 "when reducing constant in comparison");
8671
8672 /* Try canonicalization by simplifying arg0. */
8673 strict_overflow_p = false;
8674 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8675 &strict_overflow_p);
8676 if (t)
8677 {
8678 if (strict_overflow_p)
8679 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8680 return t;
8681 }
8682
8683 /* Try canonicalization by simplifying arg1 using the swapped
8684 comparison. */
8685 code = swap_tree_comparison (code);
8686 strict_overflow_p = false;
8687 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8688 &strict_overflow_p);
8689 if (t && strict_overflow_p)
8690 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8691 return t;
8692 }
8693
8694 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8695 space. This is used to avoid issuing overflow warnings for
8696 expressions like &p->x which cannot wrap. */
8697
8698 static bool
8699 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8700 {
8701 unsigned HOST_WIDE_INT offset_low, total_low;
8702 HOST_WIDE_INT size, offset_high, total_high;
8703
8704 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8705 return true;
8706
8707 if (bitpos < 0)
8708 return true;
8709
8710 if (offset == NULL_TREE)
8711 {
8712 offset_low = 0;
8713 offset_high = 0;
8714 }
8715 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8716 return true;
8717 else
8718 {
8719 offset_low = TREE_INT_CST_LOW (offset);
8720 offset_high = TREE_INT_CST_HIGH (offset);
8721 }
8722
8723 if (add_double_with_sign (offset_low, offset_high,
8724 bitpos / BITS_PER_UNIT, 0,
8725 &total_low, &total_high,
8726 true))
8727 return true;
8728
8729 if (total_high != 0)
8730 return true;
8731
8732 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8733 if (size <= 0)
8734 return true;
8735
8736 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8737 array. */
8738 if (TREE_CODE (base) == ADDR_EXPR)
8739 {
8740 HOST_WIDE_INT base_size;
8741
8742 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8743 if (base_size > 0 && size < base_size)
8744 size = base_size;
8745 }
8746
8747 return total_low > (unsigned HOST_WIDE_INT) size;
8748 }
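
/* Illustrative example, not from the original source: for a valid
   "struct s *p", the address &p->x is only reachable if the whole
   object fits in the address space, so BASE + OFFSET + BITPOS cannot
   wrap and the function returns false; a non-constant or overflowed
   OFFSET makes it conservatively return true.  */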
8749
8750 /* Subroutine of fold_binary. This routine performs all of the
8751 transformations that are common to the equality/inequality
8752 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8753 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8754 fold_binary should call fold_binary instead. Fold a comparison with
8755 tree code CODE and type TYPE with operands OP0 and OP1. Return
8756 the folded comparison or NULL_TREE. */
8757
8758 static tree
8759 fold_comparison (location_t loc, enum tree_code code, tree type,
8760 tree op0, tree op1)
8761 {
8762 tree arg0, arg1, tem;
8763
8764 arg0 = op0;
8765 arg1 = op1;
8766
8767 STRIP_SIGN_NOPS (arg0);
8768 STRIP_SIGN_NOPS (arg1);
8769
8770 tem = fold_relational_const (code, type, arg0, arg1);
8771 if (tem != NULL_TREE)
8772 return tem;
8773
8774 /* If one arg is a real or integer constant, put it last. */
8775 if (tree_swap_operands_p (arg0, arg1, true))
8776 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8777
8778 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8779 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8780 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8781 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8782 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8783 && (TREE_CODE (arg1) == INTEGER_CST
8784 && !TREE_OVERFLOW (arg1)))
8785 {
8786 tree const1 = TREE_OPERAND (arg0, 1);
8787 tree const2 = arg1;
8788 tree variable = TREE_OPERAND (arg0, 0);
8789 tree lhs;
8790 int lhs_add;
8791 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8792
8793 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8794 TREE_TYPE (arg1), const2, const1);
8795
8796 /* If the constant operation overflowed this can be
8797 simplified as a comparison against INT_MAX/INT_MIN. */
8798 if (TREE_CODE (lhs) == INTEGER_CST
8799 && TREE_OVERFLOW (lhs))
8800 {
8801 int const1_sgn = tree_int_cst_sgn (const1);
8802 enum tree_code code2 = code;
8803
8804 /* Get the sign of the constant on the lhs if the
8805 operation were VARIABLE + CONST1. */
8806 if (TREE_CODE (arg0) == MINUS_EXPR)
8807 const1_sgn = -const1_sgn;
8808
8809 /* The sign of the constant determines if we overflowed
8810 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8811 Canonicalize to the INT_MIN overflow by swapping the comparison
8812 if necessary. */
8813 if (const1_sgn == -1)
8814 code2 = swap_tree_comparison (code);
8815
8816 /* We now can look at the canonicalized case
8817 VARIABLE + 1 CODE2 INT_MIN
8818 and decide on the result. */
8819 if (code2 == LT_EXPR
8820 || code2 == LE_EXPR
8821 || code2 == EQ_EXPR)
8822 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8823 else if (code2 == NE_EXPR
8824 || code2 == GE_EXPR
8825 || code2 == GT_EXPR)
8826 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8827 }
8828
8829 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8830 && (TREE_CODE (lhs) != INTEGER_CST
8831 || !TREE_OVERFLOW (lhs)))
8832 {
8833 if (code != EQ_EXPR && code != NE_EXPR)
8834 fold_overflow_warning ("assuming signed overflow does not occur "
8835 "when changing X +- C1 cmp C2 to "
8836 "X cmp C1 +- C2",
8837 WARN_STRICT_OVERFLOW_COMPARISON);
8838 return fold_build2_loc (loc, code, type, variable, lhs);
8839 }
8840 }
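
/* Example of the transform above (illustrative comment only): with
   undefined signed overflow, "x + 20 < 10" becomes "x < -10"; if
   computing C2 -+ C1 itself overflows, the comparison degenerates to
   a constant true or false via the INT_MIN/INT_MAX analysis above.  */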
8841
8842 /* For comparisons of pointers we can decompose them to a compile time
8843 comparison of the base objects and the offsets into the object.
8844 This requires at least one operand being an ADDR_EXPR or a
8845 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8846 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8847 && (TREE_CODE (arg0) == ADDR_EXPR
8848 || TREE_CODE (arg1) == ADDR_EXPR
8849 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8850 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8851 {
8852 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8853 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8854 enum machine_mode mode;
8855 int volatilep, unsignedp;
8856 bool indirect_base0 = false, indirect_base1 = false;
8857
8858 /* Get base and offset for the access. Strip ADDR_EXPR for
8859 get_inner_reference, but put it back by stripping INDIRECT_REF
8860 off the base object if possible. indirect_baseN will be true
8861 if baseN is not an address but refers to the object itself. */
8862 base0 = arg0;
8863 if (TREE_CODE (arg0) == ADDR_EXPR)
8864 {
8865 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8866 &bitsize, &bitpos0, &offset0, &mode,
8867 &unsignedp, &volatilep, false);
8868 if (TREE_CODE (base0) == INDIRECT_REF)
8869 base0 = TREE_OPERAND (base0, 0);
8870 else
8871 indirect_base0 = true;
8872 }
8873 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8874 {
8875 base0 = TREE_OPERAND (arg0, 0);
8876 STRIP_SIGN_NOPS (base0);
8877 if (TREE_CODE (base0) == ADDR_EXPR)
8878 {
8879 base0 = TREE_OPERAND (base0, 0);
8880 indirect_base0 = true;
8881 }
8882 offset0 = TREE_OPERAND (arg0, 1);
8883 if (host_integerp (offset0, 0))
8884 {
8885 HOST_WIDE_INT off = size_low_cst (offset0);
8886 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8887 * BITS_PER_UNIT)
8888 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8889 {
8890 bitpos0 = off * BITS_PER_UNIT;
8891 offset0 = NULL_TREE;
8892 }
8893 }
8894 }
8895
8896 base1 = arg1;
8897 if (TREE_CODE (arg1) == ADDR_EXPR)
8898 {
8899 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8900 &bitsize, &bitpos1, &offset1, &mode,
8901 &unsignedp, &volatilep, false);
8902 if (TREE_CODE (base1) == INDIRECT_REF)
8903 base1 = TREE_OPERAND (base1, 0);
8904 else
8905 indirect_base1 = true;
8906 }
8907 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8908 {
8909 base1 = TREE_OPERAND (arg1, 0);
8910 STRIP_SIGN_NOPS (base1);
8911 if (TREE_CODE (base1) == ADDR_EXPR)
8912 {
8913 base1 = TREE_OPERAND (base1, 0);
8914 indirect_base1 = true;
8915 }
8916 offset1 = TREE_OPERAND (arg1, 1);
8917 if (host_integerp (offset1, 0))
8918 {
8919 HOST_WIDE_INT off = size_low_cst (offset1);
8920 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8921 * BITS_PER_UNIT)
8922 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8923 {
8924 bitpos1 = off * BITS_PER_UNIT;
8925 offset1 = NULL_TREE;
8926 }
8927 }
8928 }
8929
8930 /* A local variable can never be pointed to by
8931 the default SSA name of an incoming parameter. */
8932 if ((TREE_CODE (arg0) == ADDR_EXPR
8933 && indirect_base0
8934 && TREE_CODE (base0) == VAR_DECL
8935 && auto_var_in_fn_p (base0, current_function_decl)
8936 && !indirect_base1
8937 && TREE_CODE (base1) == SSA_NAME
8938 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8939 && SSA_NAME_IS_DEFAULT_DEF (base1))
8940 || (TREE_CODE (arg1) == ADDR_EXPR
8941 && indirect_base1
8942 && TREE_CODE (base1) == VAR_DECL
8943 && auto_var_in_fn_p (base1, current_function_decl)
8944 && !indirect_base0
8945 && TREE_CODE (base0) == SSA_NAME
8946 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8947 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8948 {
8949 if (code == NE_EXPR)
8950 return constant_boolean_node (1, type);
8951 else if (code == EQ_EXPR)
8952 return constant_boolean_node (0, type);
8953 }
8954 /* If we have equivalent bases we might be able to simplify. */
8955 else if (indirect_base0 == indirect_base1
8956 && operand_equal_p (base0, base1, 0))
8957 {
8958 /* We can fold this expression to a constant if the non-constant
8959 offset parts are equal. */
8960 if ((offset0 == offset1
8961 || (offset0 && offset1
8962 && operand_equal_p (offset0, offset1, 0)))
8963 && (code == EQ_EXPR
8964 || code == NE_EXPR
8965 || (indirect_base0 && DECL_P (base0))
8966 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8968 {
8969 if (code != EQ_EXPR
8970 && code != NE_EXPR
8971 && bitpos0 != bitpos1
8972 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8973 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8974 fold_overflow_warning (("assuming pointer wraparound does not "
8975 "occur when comparing P +- C1 with "
8976 "P +- C2"),
8977 WARN_STRICT_OVERFLOW_CONDITIONAL);
8978
8979 switch (code)
8980 {
8981 case EQ_EXPR:
8982 return constant_boolean_node (bitpos0 == bitpos1, type);
8983 case NE_EXPR:
8984 return constant_boolean_node (bitpos0 != bitpos1, type);
8985 case LT_EXPR:
8986 return constant_boolean_node (bitpos0 < bitpos1, type);
8987 case LE_EXPR:
8988 return constant_boolean_node (bitpos0 <= bitpos1, type);
8989 case GE_EXPR:
8990 return constant_boolean_node (bitpos0 >= bitpos1, type);
8991 case GT_EXPR:
8992 return constant_boolean_node (bitpos0 > bitpos1, type);
8993 default:;
8994 }
8995 }
8996 /* We can simplify the comparison to a comparison of the variable
8997 offset parts if the constant offset parts are equal.
8998 Be careful to use signed size type here because otherwise we
8999 mess with array offsets in the wrong way. This is possible
9000 because pointer arithmetic is restricted to retain within an
9001 object and overflow on pointer differences is undefined as of
9002 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9003 else if (bitpos0 == bitpos1
9004 && ((code == EQ_EXPR || code == NE_EXPR)
9005 || (indirect_base0 && DECL_P (base0))
9006 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9007 {
9008 /* By converting to signed size type we cover middle-end pointer
9009 arithmetic, which operates on unsigned pointer types of size
9010 type size, and ARRAY_REF offsets, which are properly sign- or
9011 zero-extended from their type in case it is narrower than
9012 size type. */
9013 if (offset0 == NULL_TREE)
9014 offset0 = build_int_cst (ssizetype, 0);
9015 else
9016 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9017 if (offset1 == NULL_TREE)
9018 offset1 = build_int_cst (ssizetype, 0);
9019 else
9020 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9021
9022 if (code != EQ_EXPR
9023 && code != NE_EXPR
9024 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9025 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9026 fold_overflow_warning (("assuming pointer wraparound does not "
9027 "occur when comparing P +- C1 with "
9028 "P +- C2"),
9029 WARN_STRICT_OVERFLOW_COMPARISON);
9030
9031 return fold_build2_loc (loc, code, type, offset0, offset1);
9032 }
9033 }
9034 /* For non-equal bases we can simplify if they are addresses
9035 of local binding decls or constants. */
9036 else if (indirect_base0 && indirect_base1
9037 /* We know that !operand_equal_p (base0, base1, 0)
9038 because the if condition was false. But make
9039 sure two decls are not the same. */
9040 && base0 != base1
9041 && TREE_CODE (arg0) == ADDR_EXPR
9042 && TREE_CODE (arg1) == ADDR_EXPR
9043 && (((TREE_CODE (base0) == VAR_DECL
9044 || TREE_CODE (base0) == PARM_DECL)
9045 && (targetm.binds_local_p (base0)
9046 || CONSTANT_CLASS_P (base1)))
9047 || CONSTANT_CLASS_P (base0))
9048 && (((TREE_CODE (base1) == VAR_DECL
9049 || TREE_CODE (base1) == PARM_DECL)
9050 && (targetm.binds_local_p (base1)
9051 || CONSTANT_CLASS_P (base0)))
9052 || CONSTANT_CLASS_P (base1)))
9053 {
9054 if (code == EQ_EXPR)
9055 return omit_two_operands_loc (loc, type, boolean_false_node,
9056 arg0, arg1);
9057 else if (code == NE_EXPR)
9058 return omit_two_operands_loc (loc, type, boolean_true_node,
9059 arg0, arg1);
9060 }
9061 /* For equal offsets we can simplify to a comparison of the
9062 base addresses. */
9063 else if (bitpos0 == bitpos1
9064 && (indirect_base0
9065 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9066 && (indirect_base1
9067 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9068 && ((offset0 == offset1)
9069 || (offset0 && offset1
9070 && operand_equal_p (offset0, offset1, 0))))
9071 {
9072 if (indirect_base0)
9073 base0 = build_fold_addr_expr_loc (loc, base0);
9074 if (indirect_base1)
9075 base1 = build_fold_addr_expr_loc (loc, base1);
9076 return fold_build2_loc (loc, code, type, base0, base1);
9077 }
9078 }
9079
9080 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9081 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9082 the resulting offset is smaller in absolute value than the
9083 original one. */
9084 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9085 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9086 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9087 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9088 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9089 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9090 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9091 {
9092 tree const1 = TREE_OPERAND (arg0, 1);
9093 tree const2 = TREE_OPERAND (arg1, 1);
9094 tree variable1 = TREE_OPERAND (arg0, 0);
9095 tree variable2 = TREE_OPERAND (arg1, 0);
9096 tree cst;
9097 const char * const warnmsg = G_("assuming signed overflow does not "
9098 "occur when combining constants around "
9099 "a comparison");
9100
9101 /* Put the constant on the side where it doesn't overflow and is
9102 of lower absolute value than before. */
9103 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9104 ? MINUS_EXPR : PLUS_EXPR,
9105 const2, const1);
9106 if (!TREE_OVERFLOW (cst)
9107 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9108 {
9109 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9110 return fold_build2_loc (loc, code, type,
9111 variable1,
9112 fold_build2_loc (loc,
9113 TREE_CODE (arg1), TREE_TYPE (arg1),
9114 variable2, cst));
9115 }
9116
9117 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9118 ? MINUS_EXPR : PLUS_EXPR,
9119 const1, const2);
9120 if (!TREE_OVERFLOW (cst)
9121 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9122 {
9123 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9124 return fold_build2_loc (loc, code, type,
9125 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9126 variable1, cst),
9127 variable2);
9128 }
9129 }
9130
9131 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9132 signed arithmetic case. That form is created by the compiler
9133 often enough for folding it to be of value. One example is in
9134 computing loop trip counts after Operator Strength Reduction. */
9135 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9136 && TREE_CODE (arg0) == MULT_EXPR
9137 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9138 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9139 && integer_zerop (arg1))
9140 {
9141 tree const1 = TREE_OPERAND (arg0, 1);
9142 tree const2 = arg1; /* zero */
9143 tree variable1 = TREE_OPERAND (arg0, 0);
9144 enum tree_code cmp_code = code;
9145
9146 /* Handle unfolded multiplication by zero. */
9147 if (integer_zerop (const1))
9148 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9149
9150 fold_overflow_warning (("assuming signed overflow does not occur when "
9151 "eliminating multiplication in comparison "
9152 "with zero"),
9153 WARN_STRICT_OVERFLOW_COMPARISON);
9154
9155 /* If const1 is negative we swap the sense of the comparison. */
9156 if (tree_int_cst_sgn (const1) < 0)
9157 cmp_code = swap_tree_comparison (cmp_code);
9158
9159 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9160 }
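
/* Example of the transform above (illustrative comment only): with
   undefined signed overflow, "x * 4 > 0" folds to "x > 0", and
   "x * -4 > 0" folds to "x < 0", since the negative factor flips the
   comparison.  */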
9161
9162 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9163 if (tem)
9164 return tem;
9165
9166 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9167 {
9168 tree targ0 = strip_float_extensions (arg0);
9169 tree targ1 = strip_float_extensions (arg1);
9170 tree newtype = TREE_TYPE (targ0);
9171
9172 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9173 newtype = TREE_TYPE (targ1);
9174
9175 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9176 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9177 return fold_build2_loc (loc, code, type,
9178 fold_convert_loc (loc, newtype, targ0),
9179 fold_convert_loc (loc, newtype, targ1));
9180
9181 /* (-a) CMP (-b) -> b CMP a */
9182 if (TREE_CODE (arg0) == NEGATE_EXPR
9183 && TREE_CODE (arg1) == NEGATE_EXPR)
9184 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9185 TREE_OPERAND (arg0, 0));
9186
9187 if (TREE_CODE (arg1) == REAL_CST)
9188 {
9189 REAL_VALUE_TYPE cst;
9190 cst = TREE_REAL_CST (arg1);
9191
9192 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9193 if (TREE_CODE (arg0) == NEGATE_EXPR)
9194 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9195 TREE_OPERAND (arg0, 0),
9196 build_real (TREE_TYPE (arg1),
9197 real_value_negate (&cst)));
9198
9199 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9200 /* a CMP (-0) -> a CMP 0 */
9201 if (REAL_VALUE_MINUS_ZERO (cst))
9202 return fold_build2_loc (loc, code, type, arg0,
9203 build_real (TREE_TYPE (arg1), dconst0));
9204
9205 /* x != NaN is always true, other ops are always false. */
9206 if (REAL_VALUE_ISNAN (cst)
9207 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9208 {
9209 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9210 return omit_one_operand_loc (loc, type, tem, arg0);
9211 }
9212
9213 /* Fold comparisons against infinity. */
9214 if (REAL_VALUE_ISINF (cst)
9215 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9216 {
9217 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9218 if (tem != NULL_TREE)
9219 return tem;
9220 }
9221 }
9222
9223 /* If this is a comparison of a real constant with a PLUS_EXPR
9224 or a MINUS_EXPR of a real constant, we can convert it into a
9225 comparison with a revised real constant as long as no overflow
9226 occurs when unsafe_math_optimizations are enabled. */
9227 if (flag_unsafe_math_optimizations
9228 && TREE_CODE (arg1) == REAL_CST
9229 && (TREE_CODE (arg0) == PLUS_EXPR
9230 || TREE_CODE (arg0) == MINUS_EXPR)
9231 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9232 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9233 ? MINUS_EXPR : PLUS_EXPR,
9234 arg1, TREE_OPERAND (arg0, 1)))
9235 && !TREE_OVERFLOW (tem))
9236 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9237
9238 /* Likewise, we can simplify a comparison of a real constant with
9239 a MINUS_EXPR whose first operand is also a real constant, i.e.
9240 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9241 floating-point types only if -fassociative-math is set. */
9242 if (flag_associative_math
9243 && TREE_CODE (arg1) == REAL_CST
9244 && TREE_CODE (arg0) == MINUS_EXPR
9245 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9246 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9247 arg1))
9248 && !TREE_OVERFLOW (tem))
9249 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9250 TREE_OPERAND (arg0, 1), tem);
9251
9252 /* Fold comparisons against built-in math functions. */
9253 if (TREE_CODE (arg1) == REAL_CST
9254 && flag_unsafe_math_optimizations
9255 && ! flag_errno_math)
9256 {
9257 enum built_in_function fcode = builtin_mathfn_code (arg0);
9258
9259 if (fcode != END_BUILTINS)
9260 {
9261 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9262 if (tem != NULL_TREE)
9263 return tem;
9264 }
9265 }
9266 }
9267
9268 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9269 && CONVERT_EXPR_P (arg0))
9270 {
9271 /* If we are widening one operand of an integer comparison,
9272 see if the other operand is similarly being widened. Perhaps we
9273 can do the comparison in the narrower type. */
9274 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9275 if (tem)
9276 return tem;
9277
9278 /* Or if we are changing signedness. */
9279 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9280 if (tem)
9281 return tem;
9282 }
9283
9284 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9285 constant, we can simplify it. */
9286 if (TREE_CODE (arg1) == INTEGER_CST
9287 && (TREE_CODE (arg0) == MIN_EXPR
9288 || TREE_CODE (arg0) == MAX_EXPR)
9289 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9290 {
9291 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9292 if (tem)
9293 return tem;
9294 }
9295
9296 /* Simplify comparison of something with itself. (For IEEE
9297 floating-point, we can only do some of these simplifications.) */
9298 if (operand_equal_p (arg0, arg1, 0))
9299 {
9300 switch (code)
9301 {
9302 case EQ_EXPR:
9303 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9304 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9305 return constant_boolean_node (1, type);
9306 break;
9307
9308 case GE_EXPR:
9309 case LE_EXPR:
9310 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9311 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9312 return constant_boolean_node (1, type);
9313 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9314
9315 case NE_EXPR:
9316 /* For NE, we can only do this simplification if integer
9317 or we don't honor IEEE floating point NaNs. */
9318 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9319 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9320 break;
9321 /* ... fall through ... */
9322 case GT_EXPR:
9323 case LT_EXPR:
9324 return constant_boolean_node (0, type);
9325 default:
9326 gcc_unreachable ();
9327 }
9328 }
9329
9330 /* If we are comparing an expression that just has comparisons
9331 of two integer values, arithmetic expressions of those comparisons,
9332 and constants, we can simplify it. There are only three cases
9333 to check: the two values can either be equal, the first can be
9334 greater, or the second can be greater. Fold the expression for
9335 those three values. Since each value must be 0 or 1, we have
9336 eight possibilities, each of which corresponds to the constant 0
9337 or 1 or one of the six possible comparisons.
9338
9339 This handles common cases like (a > b) == 0 but also handles
9340 expressions like ((x > y) - (y > x)) > 0, which supposedly
9341 occur in macroized code. */
9342
9343 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9344 {
9345 tree cval1 = 0, cval2 = 0;
9346 int save_p = 0;
9347
9348 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9349 /* Don't handle degenerate cases here; they should already
9350 have been handled anyway. */
9351 && cval1 != 0 && cval2 != 0
9352 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9353 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9354 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9355 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9356 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9357 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9358 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9359 {
9360 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9361 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9362
9363 /* We can't just pass T to eval_subst in case cval1 or cval2
9364 was the same as ARG1. */
9365
9366 tree high_result
9367 = fold_build2_loc (loc, code, type,
9368 eval_subst (loc, arg0, cval1, maxval,
9369 cval2, minval),
9370 arg1);
9371 tree equal_result
9372 = fold_build2_loc (loc, code, type,
9373 eval_subst (loc, arg0, cval1, maxval,
9374 cval2, maxval),
9375 arg1);
9376 tree low_result
9377 = fold_build2_loc (loc, code, type,
9378 eval_subst (loc, arg0, cval1, minval,
9379 cval2, maxval),
9380 arg1);
9381
9382 /* All three of these results should be 0 or 1. Confirm they are.
9383 Then use those values to select the proper code to use. */
9384
9385 if (TREE_CODE (high_result) == INTEGER_CST
9386 && TREE_CODE (equal_result) == INTEGER_CST
9387 && TREE_CODE (low_result) == INTEGER_CST)
9388 {
9389 /* Make a 3-bit mask with the high-order bit being the
9390 value for `>', the next for `=', and the low for `<'. */
9391 switch ((integer_onep (high_result) * 4)
9392 + (integer_onep (equal_result) * 2)
9393 + integer_onep (low_result))
9394 {
9395 case 0:
9396 /* Always false. */
9397 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9398 case 1:
9399 code = LT_EXPR;
9400 break;
9401 case 2:
9402 code = EQ_EXPR;
9403 break;
9404 case 3:
9405 code = LE_EXPR;
9406 break;
9407 case 4:
9408 code = GT_EXPR;
9409 break;
9410 case 5:
9411 code = NE_EXPR;
9412 break;
9413 case 6:
9414 code = GE_EXPR;
9415 break;
9416 case 7:
9417 /* Always true. */
9418 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9419 }
9420
9421 if (save_p)
9422 {
9423 tem = save_expr (build2 (code, type, cval1, cval2));
9424 SET_EXPR_LOCATION (tem, loc);
9425 return tem;
9426 }
9427 return fold_build2_loc (loc, code, type, cval1, cval2);
9428 }
9429 }
9430 }
9431
9432 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9433 into a single range test. */
9434 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9435 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9436 && TREE_CODE (arg1) == INTEGER_CST
9437 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9438 && !integer_zerop (TREE_OPERAND (arg0, 1))
9439 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9440 && !TREE_OVERFLOW (arg1))
9441 {
9442 tem = fold_div_compare (loc, code, type, arg0, arg1);
9443 if (tem != NULL_TREE)
9444 return tem;
9445 }
9446
9447 /* Fold ~X op ~Y as Y op X. */
9448 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9449 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9450 {
9451 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9452 return fold_build2_loc (loc, code, type,
9453 fold_convert_loc (loc, cmp_type,
9454 TREE_OPERAND (arg1, 0)),
9455 TREE_OPERAND (arg0, 0));
9456 }
9457
9458 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9459 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9460 && TREE_CODE (arg1) == INTEGER_CST)
9461 {
9462 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9463 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9464 TREE_OPERAND (arg0, 0),
9465 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9466 fold_convert_loc (loc, cmp_type, arg1)));
9467 }
9468
9469 return NULL_TREE;
9470 }
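
/* Illustrative sketch, not part of the original source: the final two
   BIT_NOT_EXPR folds above, in source terms.  */
#if 0
int h1 (int x, int y) { return ~x < ~y; }  /* folds to  y < x               */
int h2 (int x)        { return ~x < 5;  }  /* folds to  x > ~5, i.e. x > -6 */
#endif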
9471
9472
9473 /* Subroutine of fold_binary. Optimize complex multiplications of the
9474 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9475 argument EXPR represents the expression "z" of type TYPE. */
9476
9477 static tree
9478 fold_mult_zconjz (location_t loc, tree type, tree expr)
9479 {
9480 tree itype = TREE_TYPE (type);
9481 tree rpart, ipart, tem;
9482
9483 if (TREE_CODE (expr) == COMPLEX_EXPR)
9484 {
9485 rpart = TREE_OPERAND (expr, 0);
9486 ipart = TREE_OPERAND (expr, 1);
9487 }
9488 else if (TREE_CODE (expr) == COMPLEX_CST)
9489 {
9490 rpart = TREE_REALPART (expr);
9491 ipart = TREE_IMAGPART (expr);
9492 }
9493 else
9494 {
9495 expr = save_expr (expr);
9496 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9497 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9498 }
9499
9500 rpart = save_expr (rpart);
9501 ipart = save_expr (ipart);
9502 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9503 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9504 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9505 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9506 build_zero_cst (itype));
9507 }
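
/* Illustrative sketch, not part of the original source: for
   z = a + b*i, z * conj(z) is (a*a + b*b) + 0*i, which is the form
   fold_mult_zconjz builds.  Uses the GNU __real__/__imag__ extension.  */
#if 0
double _Complex
zconjz_example (double _Complex z)
{
  double a = __real__ z, b = __imag__ z;
  double _Complex r;
  __real__ r = a * a + b * b;  /* real part: |z|^2                */
  __imag__ r = 0.0;            /* imaginary part is exactly zero  */
  return r;                    /* equivalent to z * conj (z)      */
}
#endif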
9508
9509
9510 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9511 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9512 guarantees that P and N have the same least significant log2(M) bits.
9513 N is not otherwise constrained. In particular, N is not normalized to
9514 0 <= N < M as is common. In general, the precise value of P is unknown.
9515 M is chosen as large as possible such that constant N can be determined.
9516
9517 Returns M and sets *RESIDUE to N.
9518
9519 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9520 account. This is not always possible due to PR 35705.
9521 */
9522
9523 static unsigned HOST_WIDE_INT
9524 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9525 bool allow_func_align)
9526 {
9527 enum tree_code code;
9528
9529 *residue = 0;
9530
9531 code = TREE_CODE (expr);
9532 if (code == ADDR_EXPR)
9533 {
9534 unsigned int bitalign;
9535 bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
9536 *residue /= BITS_PER_UNIT;
9537 return bitalign / BITS_PER_UNIT;
9538 }
9539 else if (code == POINTER_PLUS_EXPR)
9540 {
9541 tree op0, op1;
9542 unsigned HOST_WIDE_INT modulus;
9543 enum tree_code inner_code;
9544
9545 op0 = TREE_OPERAND (expr, 0);
9546 STRIP_NOPS (op0);
9547 modulus = get_pointer_modulus_and_residue (op0, residue,
9548 allow_func_align);
9549
9550 op1 = TREE_OPERAND (expr, 1);
9551 STRIP_NOPS (op1);
9552 inner_code = TREE_CODE (op1);
9553 if (inner_code == INTEGER_CST)
9554 {
9555 *residue += TREE_INT_CST_LOW (op1);
9556 return modulus;
9557 }
9558 else if (inner_code == MULT_EXPR)
9559 {
9560 op1 = TREE_OPERAND (op1, 1);
9561 if (TREE_CODE (op1) == INTEGER_CST)
9562 {
9563 unsigned HOST_WIDE_INT align;
9564
9565 /* Compute the greatest power-of-2 divisor of op1. */
9566 align = TREE_INT_CST_LOW (op1);
9567 align &= -align;
9568
9569 /* If align is non-zero and less than modulus, replace
9570 modulus with align. If align is 0, then either op1 is 0
9571 or the greatest power-of-2 divisor of op1 doesn't fit in an
9572 unsigned HOST_WIDE_INT. In either case, no additional
9573 constraint is imposed. */
9574 if (align)
9575 modulus = MIN (modulus, align);
9576
9577 return modulus;
9578 }
9579 }
9580 }
9581
9582 /* If we get here, we were unable to determine anything useful about the
9583 expression. */
9584 return 1;
9585 }
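
/* Illustrative example, not from the original source: for EXPR of the
   form "(char *) &var16 + 5", where var16 is known to be 16-byte
   aligned, the function returns modulus M = 16 and sets *RESIDUE to
   N = 5, i.e. P = 16*k + 5 for some k, so the low log2(16) = 4 bits
   of P are known.  */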
9586
9587 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9588 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9589
9590 static bool
9591 vec_cst_ctor_to_array (tree arg, tree *elts)
9592 {
9593 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9594
9595 if (TREE_CODE (arg) == VECTOR_CST)
9596 {
9597 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9598 elts[i] = VECTOR_CST_ELT (arg, i);
9599 }
9600 else if (TREE_CODE (arg) == CONSTRUCTOR)
9601 {
9602 constructor_elt *elt;
9603
9604 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
9605 if (i >= nelts)
9606 return false;
9607 else
9608 elts[i] = elt->value;
9609 }
9610 else
9611 return false;
9612 for (; i < nelts; i++)
9613 elts[i]
9614 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9615 return true;
9616 }
9617
9618 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9619 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9620 NULL_TREE otherwise. */
9621
9622 static tree
9623 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9624 {
9625 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9626 tree *elts;
9627 bool need_ctor = false;
9628
9629 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9630 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9631 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9632 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9633 return NULL_TREE;
9634
9635 elts = XALLOCAVEC (tree, nelts * 3);
9636 if (!vec_cst_ctor_to_array (arg0, elts)
9637 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9638 return NULL_TREE;
9639
9640 for (i = 0; i < nelts; i++)
9641 {
9642 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9643 need_ctor = true;
9644 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9645 }
9646
9647 if (need_ctor)
9648 {
9649 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
9650 for (i = 0; i < nelts; i++)
9651 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9652 return build_constructor (type, v);
9653 }
9654 else
9655 return build_vector (type, &elts[2 * nelts]);
9656 }
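
/* Illustrative example, not from the original source: with
   ARG0 = {10, 11, 12, 13}, ARG1 = {20, 21, 22, 23} and
   SEL = {0, 4, 1, 5}, the selector indexes the concatenation of the
   two input vectors, so the fold yields {10, 20, 11, 21}; a
   CONSTRUCTOR is built instead of a VECTOR_CST only if some selected
   element is not a constant.  */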
9657
9658 /* Try to fold a pointer difference of type TYPE between two address
9659 expressions of array references AREF0 and AREF1 using location LOC. Return a
9660 simplified expression for the difference or NULL_TREE. */
9661
9662 static tree
9663 fold_addr_of_array_ref_difference (location_t loc, tree type,
9664 tree aref0, tree aref1)
9665 {
9666 tree base0 = TREE_OPERAND (aref0, 0);
9667 tree base1 = TREE_OPERAND (aref1, 0);
9668 tree base_offset = build_int_cst (type, 0);
9669
9670 /* If the bases are array references as well, recurse. If the bases
9671 are pointer indirections compute the difference of the pointers.
9672 If the bases are equal, we are set. */
9673 if ((TREE_CODE (base0) == ARRAY_REF
9674 && TREE_CODE (base1) == ARRAY_REF
9675 && (base_offset
9676 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9677 || (INDIRECT_REF_P (base0)
9678 && INDIRECT_REF_P (base1)
9679 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9680 TREE_OPERAND (base0, 0),
9681 TREE_OPERAND (base1, 0))))
9682 || operand_equal_p (base0, base1, 0))
9683 {
9684 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9685 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9686 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9687 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9688 return fold_build2_loc (loc, PLUS_EXPR, type,
9689 base_offset,
9690 fold_build2_loc (loc, MULT_EXPR, type,
9691 diff, esz));
9692 }
9693 return NULL_TREE;
9694 }
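
/* Illustrative sketch, not part of the original source: the fold in
   source terms.  The address difference &a[i] - &a[j] is computed as
   (i - j) * sizeof (double) at the byte level, which the C-level
   pointer subtraction then divides back by the element size.  */
#if 0
ptrdiff_t
aref_diff_example (double a[], int i, int j)
{
  return &a[i] - &a[j];  /* simplifies to (ptrdiff_t) (i - j) */
}
#endif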
9695
9696 /* If the real or vector real constant CST of type TYPE has an exact
9697 inverse, return it, else return NULL_TREE. */
9698
9699 static tree
9700 exact_inverse (tree type, tree cst)
9701 {
9702 REAL_VALUE_TYPE r;
9703 tree unit_type, *elts;
9704 enum machine_mode mode;
9705 unsigned vec_nelts, i;
9706
9707 switch (TREE_CODE (cst))
9708 {
9709 case REAL_CST:
9710 r = TREE_REAL_CST (cst);
9711
9712 if (exact_real_inverse (TYPE_MODE (type), &r))
9713 return build_real (type, r);
9714
9715 return NULL_TREE;
9716
9717 case VECTOR_CST:
9718 vec_nelts = VECTOR_CST_NELTS (cst);
9719 elts = XALLOCAVEC (tree, vec_nelts);
9720 unit_type = TREE_TYPE (type);
9721 mode = TYPE_MODE (unit_type);
9722
9723 for (i = 0; i < vec_nelts; i++)
9724 {
9725 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9726 if (!exact_real_inverse (mode, &r))
9727 return NULL_TREE;
9728 elts[i] = build_real (unit_type, r);
9729 }
9730
9731 return build_vector (type, elts);
9732
9733 default:
9734 return NULL_TREE;
9735 }
9736 }
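
/* Illustrative example, not from the original source: 4.0 has the
   exact binary inverse 0.25, so "x / 4.0" can be rewritten as
   "x * 0.25"; 10.0 has no exact inverse (0.1 is not representable in
   binary), so NULL_TREE is returned and the division is kept.  */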
9737
9738 /* Fold a binary expression of code CODE and type TYPE with operands
9739 OP0 and OP1. LOC is the location of the resulting expression.
9740 Return the folded expression if folding is successful. Otherwise,
9741 return NULL_TREE. */
9742
9743 tree
9744 fold_binary_loc (location_t loc,
9745 enum tree_code code, tree type, tree op0, tree op1)
9746 {
9747 enum tree_code_class kind = TREE_CODE_CLASS (code);
9748 tree arg0, arg1, tem;
9749 tree t1 = NULL_TREE;
9750 bool strict_overflow_p;
9751
9752 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9753 && TREE_CODE_LENGTH (code) == 2
9754 && op0 != NULL_TREE
9755 && op1 != NULL_TREE);
9756
9757 arg0 = op0;
9758 arg1 = op1;
9759
9760 /* Strip any conversions that don't change the mode. This is
9761 safe for every expression, except for a comparison expression
9762 because its signedness is derived from its operands. So, in
9763 the latter case, only strip conversions that don't change the
9764 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9765 preserved.
9766
9767 Note that this is done as an internal manipulation within the
9768 constant folder, in order to find the simplest representation
9769 of the arguments so that their form can be studied. In any
9770 case, the appropriate type conversions should be put back in
9771 the tree that will get out of the constant folder. */
9772
9773 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9774 {
9775 STRIP_SIGN_NOPS (arg0);
9776 STRIP_SIGN_NOPS (arg1);
9777 }
9778 else
9779 {
9780 STRIP_NOPS (arg0);
9781 STRIP_NOPS (arg1);
9782 }
9783
9784 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9785 constant but we can't do arithmetic on them. */
9786 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9787 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9788 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9789 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9790 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9791 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9792 {
9793 if (kind == tcc_binary)
9794 {
9795 /* Make sure type and arg0 have the same saturating flag. */
9796 gcc_assert (TYPE_SATURATING (type)
9797 == TYPE_SATURATING (TREE_TYPE (arg0)));
9798 tem = const_binop (code, arg0, arg1);
9799 }
9800 else if (kind == tcc_comparison)
9801 tem = fold_relational_const (code, type, arg0, arg1);
9802 else
9803 tem = NULL_TREE;
9804
9805 if (tem != NULL_TREE)
9806 {
9807 if (TREE_TYPE (tem) != type)
9808 tem = fold_convert_loc (loc, type, tem);
9809 return tem;
9810 }
9811 }
9812
9813 /* If this is a commutative operation, and ARG0 is a constant, move it
9814 to ARG1 to reduce the number of tests below. */
9815 if (commutative_tree_code (code)
9816 && tree_swap_operands_p (arg0, arg1, true))
9817 return fold_build2_loc (loc, code, type, op1, op0);
9818
9819 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9820
9821 First check for cases where an arithmetic operation is applied to a
9822 compound, conditional, or comparison operation. Push the arithmetic
9823 operation inside the compound or conditional to see if any folding
9824 can then be done. Convert comparison to conditional for this purpose.
9825 This also optimizes non-constant cases that used to be done in
9826 expand_expr.
9827
9828 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9829 where one of the operands is a comparison and the other is a comparison, a
9830 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9831 code below would make the expression more complex. Change it to a
9832 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9833 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9834
9835 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9836 || code == EQ_EXPR || code == NE_EXPR)
9837 && ((truth_value_p (TREE_CODE (arg0))
9838 && (truth_value_p (TREE_CODE (arg1))
9839 || (TREE_CODE (arg1) == BIT_AND_EXPR
9840 && integer_onep (TREE_OPERAND (arg1, 1)))))
9841 || (truth_value_p (TREE_CODE (arg1))
9842 && (truth_value_p (TREE_CODE (arg0))
9843 || (TREE_CODE (arg0) == BIT_AND_EXPR
9844 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9845 {
9846 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9847 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9848 : TRUTH_XOR_EXPR,
9849 boolean_type_node,
9850 fold_convert_loc (loc, boolean_type_node, arg0),
9851 fold_convert_loc (loc, boolean_type_node, arg1));
9852
9853 if (code == EQ_EXPR)
9854 tem = invert_truthvalue_loc (loc, tem);
9855
9856 return fold_convert_loc (loc, type, tem);
9857 }
9858
9859 if (TREE_CODE_CLASS (code) == tcc_binary
9860 || TREE_CODE_CLASS (code) == tcc_comparison)
9861 {
9862 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9863 {
9864 tem = fold_build2_loc (loc, code, type,
9865 fold_convert_loc (loc, TREE_TYPE (op0),
9866 TREE_OPERAND (arg0, 1)), op1);
9867 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9868 tem);
9869 }
9870 if (TREE_CODE (arg1) == COMPOUND_EXPR
9871 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9872 {
9873 tem = fold_build2_loc (loc, code, type, op0,
9874 fold_convert_loc (loc, TREE_TYPE (op1),
9875 TREE_OPERAND (arg1, 1)));
9876 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9877 tem);
9878 }
9879
9880 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9881 {
9882 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9883 arg0, arg1,
9884 /*cond_first_p=*/1);
9885 if (tem != NULL_TREE)
9886 return tem;
9887 }
9888
9889 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9890 {
9891 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9892 arg1, arg0,
9893 /*cond_first_p=*/0);
9894 if (tem != NULL_TREE)
9895 return tem;
9896 }
9897 }
9898
9899 switch (code)
9900 {
9901 case MEM_REF:
9902 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9903 if (TREE_CODE (arg0) == ADDR_EXPR
9904 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9905 {
9906 tree iref = TREE_OPERAND (arg0, 0);
9907 return fold_build2 (MEM_REF, type,
9908 TREE_OPERAND (iref, 0),
9909 int_const_binop (PLUS_EXPR, arg1,
9910 TREE_OPERAND (iref, 1)));
9911 }
9912
9913 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9914 if (TREE_CODE (arg0) == ADDR_EXPR
9915 && handled_component_p (TREE_OPERAND (arg0, 0)))
9916 {
9917 tree base;
9918 HOST_WIDE_INT coffset;
9919 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9920 &coffset);
9921 if (!base)
9922 return NULL_TREE;
9923 return fold_build2 (MEM_REF, type,
9924 build_fold_addr_expr (base),
9925 int_const_binop (PLUS_EXPR, arg1,
9926 size_int (coffset)));
9927 }
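
/* Example of the two folds above (illustrative comment only):
   MEM[&MEM[p, 4], 8] becomes MEM[p, 12], and MEM[&a.b, 8] becomes
   MEM[&a, offsetof (a, b) + 8] when the base and unit offset of a.b
   can be recovered.  */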
9928
9929 return NULL_TREE;
9930
9931 case POINTER_PLUS_EXPR:
9932 /* 0 +p index -> (type)index */
9933 if (integer_zerop (arg0))
9934 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9935
9936 /* PTR +p 0 -> PTR */
9937 if (integer_zerop (arg1))
9938 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9939
9940 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9941 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9942 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9943 return fold_convert_loc (loc, type,
9944 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9945 fold_convert_loc (loc, sizetype,
9946 arg1),
9947 fold_convert_loc (loc, sizetype,
9948 arg0)));
9949
9950 /* (PTR +p B) +p A -> PTR +p (B + A) */
9951 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9952 {
9953 tree inner;
9954 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9955 tree arg00 = TREE_OPERAND (arg0, 0);
9956 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9957 arg01, fold_convert_loc (loc, sizetype, arg1));
9958 return fold_convert_loc (loc, type,
9959 fold_build_pointer_plus_loc (loc,
9960 arg00, inner));
9961 }
9962
9963 /* PTR_CST +p CST -> CST1, folding the two constants together. */
9964 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9965 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9966 fold_convert_loc (loc, type, arg1));
9967
9968 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9969 of the array. The loop optimizer sometimes produces this type of
9970 expression. */
9971 if (TREE_CODE (arg0) == ADDR_EXPR)
9972 {
9973 tem = try_move_mult_to_index (loc, arg0,
9974 fold_convert_loc (loc, sizetype, arg1));
9975 if (tem)
9976 return fold_convert_loc (loc, type, tem);
9977 }
9978
9979 return NULL_TREE;
9980
9981 case PLUS_EXPR:
9982 /* A + (-B) -> A - B */
9983 if (TREE_CODE (arg1) == NEGATE_EXPR)
9984 return fold_build2_loc (loc, MINUS_EXPR, type,
9985 fold_convert_loc (loc, type, arg0),
9986 fold_convert_loc (loc, type,
9987 TREE_OPERAND (arg1, 0)));
9988 /* (-A) + B -> B - A */
9989 if (TREE_CODE (arg0) == NEGATE_EXPR
9990 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9991 return fold_build2_loc (loc, MINUS_EXPR, type,
9992 fold_convert_loc (loc, type, arg1),
9993 fold_convert_loc (loc, type,
9994 TREE_OPERAND (arg0, 0)));
9995
9996 if (INTEGRAL_TYPE_P (type))
9997 {
9998 /* Convert ~A + 1 to -A. */
9999 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10000 && integer_onep (arg1))
10001 return fold_build1_loc (loc, NEGATE_EXPR, type,
10002 fold_convert_loc (loc, type,
10003 TREE_OPERAND (arg0, 0)));
10004
10005 /* ~X + X is -1. */
10006 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10007 && !TYPE_OVERFLOW_TRAPS (type))
10008 {
10009 tree tem = TREE_OPERAND (arg0, 0);
10010
10011 STRIP_NOPS (tem);
10012 if (operand_equal_p (tem, arg1, 0))
10013 {
10014 t1 = build_int_cst_type (type, -1);
10015 return omit_one_operand_loc (loc, type, t1, arg1);
10016 }
10017 }
10018
10019 /* X + ~X is -1. */
10020 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10021 && !TYPE_OVERFLOW_TRAPS (type))
10022 {
10023 tree tem = TREE_OPERAND (arg1, 0);
10024
10025 STRIP_NOPS (tem);
10026 if (operand_equal_p (arg0, tem, 0))
10027 {
10028 t1 = build_int_cst_type (type, -1);
10029 return omit_one_operand_loc (loc, type, t1, arg0);
10030 }
10031 }
10032
10033 /* X + (X / CST) * -CST is X % CST. */
10034 if (TREE_CODE (arg1) == MULT_EXPR
10035 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10036 && operand_equal_p (arg0,
10037 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10038 {
10039 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10040 tree cst1 = TREE_OPERAND (arg1, 1);
10041 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10042 cst1, cst0);
10043 if (sum && integer_zerop (sum))
10044 return fold_convert_loc (loc, type,
10045 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10046 TREE_TYPE (arg0), arg0,
10047 cst0));
10048 }
10049 }
10050
10051 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
10052 same or one. Make sure type is not saturating.
10053 fold_plusminus_mult_expr will re-associate. */
10054 if ((TREE_CODE (arg0) == MULT_EXPR
10055 || TREE_CODE (arg1) == MULT_EXPR)
10056 && !TYPE_SATURATING (type)
10057 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10058 {
10059 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10060 if (tem)
10061 return tem;
10062 }
10063
10064 if (! FLOAT_TYPE_P (type))
10065 {
10066 if (integer_zerop (arg1))
10067 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10068
10069 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10070 with a constant, and the two constants have no bits in common,
10071 we should treat this as a BIT_IOR_EXPR since this may produce more
10072 simplifications. */
10073 if (TREE_CODE (arg0) == BIT_AND_EXPR
10074 && TREE_CODE (arg1) == BIT_AND_EXPR
10075 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10076 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10077 && integer_zerop (const_binop (BIT_AND_EXPR,
10078 TREE_OPERAND (arg0, 1),
10079 TREE_OPERAND (arg1, 1))))
10080 {
10081 code = BIT_IOR_EXPR;
10082 goto bit_ior;
10083 }
10084
10085 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10086 (plus (plus (mult) (mult)) (foo)) so that we can
10087 take advantage of the factoring cases below. */
10088 if (TYPE_OVERFLOW_WRAPS (type)
10089 && (((TREE_CODE (arg0) == PLUS_EXPR
10090 || TREE_CODE (arg0) == MINUS_EXPR)
10091 && TREE_CODE (arg1) == MULT_EXPR)
10092 || ((TREE_CODE (arg1) == PLUS_EXPR
10093 || TREE_CODE (arg1) == MINUS_EXPR)
10094 && TREE_CODE (arg0) == MULT_EXPR)))
10095 {
10096 tree parg0, parg1, parg, marg;
10097 enum tree_code pcode;
10098
10099 if (TREE_CODE (arg1) == MULT_EXPR)
10100 parg = arg0, marg = arg1;
10101 else
10102 parg = arg1, marg = arg0;
10103 pcode = TREE_CODE (parg);
10104 parg0 = TREE_OPERAND (parg, 0);
10105 parg1 = TREE_OPERAND (parg, 1);
10106 STRIP_NOPS (parg0);
10107 STRIP_NOPS (parg1);
10108
10109 if (TREE_CODE (parg0) == MULT_EXPR
10110 && TREE_CODE (parg1) != MULT_EXPR)
10111 return fold_build2_loc (loc, pcode, type,
10112 fold_build2_loc (loc, PLUS_EXPR, type,
10113 fold_convert_loc (loc, type,
10114 parg0),
10115 fold_convert_loc (loc, type,
10116 marg)),
10117 fold_convert_loc (loc, type, parg1));
10118 if (TREE_CODE (parg0) != MULT_EXPR
10119 && TREE_CODE (parg1) == MULT_EXPR)
10120 return
10121 fold_build2_loc (loc, PLUS_EXPR, type,
10122 fold_convert_loc (loc, type, parg0),
10123 fold_build2_loc (loc, pcode, type,
10124 fold_convert_loc (loc, type, marg),
10125 fold_convert_loc (loc, type,
10126 parg1)));
10127 }
10128 }
10129 else
10130 {
10131 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10132 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10133 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10134
10135 /* Likewise if the operands are reversed. */
10136 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10137 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10138
10139 /* Convert X + -C into X - C. */
10140 if (TREE_CODE (arg1) == REAL_CST
10141 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10142 {
10143 tem = fold_negate_const (arg1, type);
10144 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10145 return fold_build2_loc (loc, MINUS_EXPR, type,
10146 fold_convert_loc (loc, type, arg0),
10147 fold_convert_loc (loc, type, tem));
10148 }
10149
10150 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10151 to __complex__ ( x, y ). This is not the same for SNaNs or
10152 if signed zeros are involved. */
10153 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10154 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10155 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10156 {
10157 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10158 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10159 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10160 bool arg0rz = false, arg0iz = false;
10161 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10162 || (arg0i && (arg0iz = real_zerop (arg0i))))
10163 {
10164 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10165 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10166 if (arg0rz && arg1i && real_zerop (arg1i))
10167 {
10168 tree rp = arg1r ? arg1r
10169 : build1 (REALPART_EXPR, rtype, arg1);
10170 tree ip = arg0i ? arg0i
10171 : build1 (IMAGPART_EXPR, rtype, arg0);
10172 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10173 }
10174 else if (arg0iz && arg1r && real_zerop (arg1r))
10175 {
10176 tree rp = arg0r ? arg0r
10177 : build1 (REALPART_EXPR, rtype, arg0);
10178 tree ip = arg1i ? arg1i
10179 : build1 (IMAGPART_EXPR, rtype, arg1);
10180 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10181 }
10182 }
10183 }
10184
10185 if (flag_unsafe_math_optimizations
10186 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10187 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10188 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10189 return tem;
10190
10191 /* Convert x+x into x*2.0. */
10192 if (operand_equal_p (arg0, arg1, 0)
10193 && SCALAR_FLOAT_TYPE_P (type))
10194 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10195 build_real (type, dconst2));
10196
10197 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10198 We associate floats only if the user has specified
10199 -fassociative-math. */
10200 if (flag_associative_math
10201 && TREE_CODE (arg1) == PLUS_EXPR
10202 && TREE_CODE (arg0) != MULT_EXPR)
10203 {
10204 tree tree10 = TREE_OPERAND (arg1, 0);
10205 tree tree11 = TREE_OPERAND (arg1, 1);
10206 if (TREE_CODE (tree11) == MULT_EXPR
10207 && TREE_CODE (tree10) == MULT_EXPR)
10208 {
10209 tree tree0;
10210 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10211 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10212 }
10213 }
10214 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10215 We associate floats only if the user has specified
10216 -fassociative-math. */
10217 if (flag_associative_math
10218 && TREE_CODE (arg0) == PLUS_EXPR
10219 && TREE_CODE (arg1) != MULT_EXPR)
10220 {
10221 tree tree00 = TREE_OPERAND (arg0, 0);
10222 tree tree01 = TREE_OPERAND (arg0, 1);
10223 if (TREE_CODE (tree01) == MULT_EXPR
10224 && TREE_CODE (tree00) == MULT_EXPR)
10225 {
10226 tree tree0;
10227 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10228 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10229 }
10230 }
10231 }
10232
10233 bit_rotate:
10234 /* (A << C1) + (A >> C2) is a rotate of A by C1 bits if A is
10235 unsigned and C1 + C2 equals the precision of A. */
10236 /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits if A is
10237 unsigned and Z is the precision of A. */
10238 {
10239 enum tree_code code0, code1;
10240 tree rtype;
10241 code0 = TREE_CODE (arg0);
10242 code1 = TREE_CODE (arg1);
10243 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10244 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10245 && operand_equal_p (TREE_OPERAND (arg0, 0),
10246 TREE_OPERAND (arg1, 0), 0)
10247 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10248 TYPE_UNSIGNED (rtype))
10249 /* Only create rotates in complete modes. Other cases are not
10250 expanded properly. */
10251 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10252 {
10253 tree tree01, tree11;
10254 enum tree_code code01, code11;
10255
10256 tree01 = TREE_OPERAND (arg0, 1);
10257 tree11 = TREE_OPERAND (arg1, 1);
10258 STRIP_NOPS (tree01);
10259 STRIP_NOPS (tree11);
10260 code01 = TREE_CODE (tree01);
10261 code11 = TREE_CODE (tree11);
10262 if (code01 == INTEGER_CST
10263 && code11 == INTEGER_CST
10264 && TREE_INT_CST_HIGH (tree01) == 0
10265 && TREE_INT_CST_HIGH (tree11) == 0
10266 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10267 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10268 {
10269 tem = build2_loc (loc, LROTATE_EXPR,
10270 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10271 TREE_OPERAND (arg0, 0),
10272 code0 == LSHIFT_EXPR ? tree01 : tree11);
10273 return fold_convert_loc (loc, type, tem);
10274 }
10275 else if (code11 == MINUS_EXPR)
10276 {
10277 tree tree110, tree111;
10278 tree110 = TREE_OPERAND (tree11, 0);
10279 tree111 = TREE_OPERAND (tree11, 1);
10280 STRIP_NOPS (tree110);
10281 STRIP_NOPS (tree111);
10282 if (TREE_CODE (tree110) == INTEGER_CST
10283 && 0 == compare_tree_int (tree110,
10284 TYPE_PRECISION
10285 (TREE_TYPE (TREE_OPERAND
10286 (arg0, 0))))
10287 && operand_equal_p (tree01, tree111, 0))
10288 return
10289 fold_convert_loc (loc, type,
10290 build2 ((code0 == LSHIFT_EXPR
10291 ? LROTATE_EXPR
10292 : RROTATE_EXPR),
10293 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10294 TREE_OPERAND (arg0, 0), tree01));
10295 }
10296 else if (code01 == MINUS_EXPR)
10297 {
10298 tree tree010, tree011;
10299 tree010 = TREE_OPERAND (tree01, 0);
10300 tree011 = TREE_OPERAND (tree01, 1);
10301 STRIP_NOPS (tree010);
10302 STRIP_NOPS (tree011);
10303 if (TREE_CODE (tree010) == INTEGER_CST
10304 && 0 == compare_tree_int (tree010,
10305 TYPE_PRECISION
10306 (TREE_TYPE (TREE_OPERAND
10307 (arg0, 0))))
10308 && operand_equal_p (tree11, tree011, 0))
10309 return fold_convert_loc
10310 (loc, type,
10311 build2 ((code0 != LSHIFT_EXPR
10312 ? LROTATE_EXPR
10313 : RROTATE_EXPR),
10314 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10315 TREE_OPERAND (arg0, 0), tree11));
10316 }
10317 }
10318 }
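/* As an illustrative example: with a 32-bit unsigned A, the sum
   (A << 8) + (A >> 24) matches the first pattern above and is
   rewritten to a left rotate of A by 8 bits.  */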
10319
10320 associate:
10321 /* In most languages, we can't associate operations on floats through
10322 parentheses. Rather than remember where the parentheses were, we
10323 don't associate floats at all, unless the user has specified
10324 -fassociative-math.
10325 We also need to make sure the type is not saturating. */
10326
10327 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10328 && !TYPE_SATURATING (type))
10329 {
10330 tree var0, con0, lit0, minus_lit0;
10331 tree var1, con1, lit1, minus_lit1;
10332 bool ok = true;
10333
10334 /* Split both trees into variables, constants, and literals. Then
10335 associate each group together, the constants with literals,
10336 then the result with variables. This increases the chances of
10337 literals being recombined later and of generating relocatable
10338 expressions for the sum of a constant and literal. */
10339 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10340 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10341 code == MINUS_EXPR);
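/* E.g., for unsigned x and y, (x + 3) + (y + 5) splits into variable
   parts x, y and literals 3, 5, which recombine below as
   (x + y) + 8.  */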
10342
10343 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10344 if (code == MINUS_EXPR)
10345 code = PLUS_EXPR;
10346
10347 /* With undefined overflow we can only associate constants with one
10348 variable, and constants whose association doesn't overflow. */
10349 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10350 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10351 {
10352 if (var0 && var1)
10353 {
10354 tree tmp0 = var0;
10355 tree tmp1 = var1;
10356
10357 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10358 tmp0 = TREE_OPERAND (tmp0, 0);
10359 if (CONVERT_EXPR_P (tmp0)
10360 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10361 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10362 <= TYPE_PRECISION (type)))
10363 tmp0 = TREE_OPERAND (tmp0, 0);
10364 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10365 tmp1 = TREE_OPERAND (tmp1, 0);
10366 if (CONVERT_EXPR_P (tmp1)
10367 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10368 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10369 <= TYPE_PRECISION (type)))
10370 tmp1 = TREE_OPERAND (tmp1, 0);
10371 /* The only case we can still associate with two variables
10372 is if they are the same, modulo negation and bit-pattern
10373 preserving conversions. */
10374 if (!operand_equal_p (tmp0, tmp1, 0))
10375 ok = false;
10376 }
10377
10378 if (ok && lit0 && lit1)
10379 {
10380 tree tmp0 = fold_convert (type, lit0);
10381 tree tmp1 = fold_convert (type, lit1);
10382
10383 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10384 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10385 ok = false;
10386 }
10387 }
10388
10389 /* Only do something if we found more than two objects. Otherwise,
10390 nothing has changed and we risk infinite recursion. */
10391 if (ok
10392 && (2 < ((var0 != 0) + (var1 != 0)
10393 + (con0 != 0) + (con1 != 0)
10394 + (lit0 != 0) + (lit1 != 0)
10395 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10396 {
10397 var0 = associate_trees (loc, var0, var1, code, type);
10398 con0 = associate_trees (loc, con0, con1, code, type);
10399 lit0 = associate_trees (loc, lit0, lit1, code, type);
10400 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10401
10402 /* Preserve the MINUS_EXPR if the negative part of the literal is
10403 greater than the positive part. Otherwise, the multiplicative
10404 folding code (i.e. extract_muldiv) may be fooled when unsigned
10405 constants are subtracted, as in the following example:
10406 ((X*2 + 4) - 8U)/2. */
10407 if (minus_lit0 && lit0)
10408 {
10409 if (TREE_CODE (lit0) == INTEGER_CST
10410 && TREE_CODE (minus_lit0) == INTEGER_CST
10411 && tree_int_cst_lt (lit0, minus_lit0))
10412 {
10413 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10414 MINUS_EXPR, type);
10415 lit0 = 0;
10416 }
10417 else
10418 {
10419 lit0 = associate_trees (loc, lit0, minus_lit0,
10420 MINUS_EXPR, type);
10421 minus_lit0 = 0;
10422 }
10423 }
10424 if (minus_lit0)
10425 {
10426 if (con0 == 0)
10427 return
10428 fold_convert_loc (loc, type,
10429 associate_trees (loc, var0, minus_lit0,
10430 MINUS_EXPR, type));
10431 else
10432 {
10433 con0 = associate_trees (loc, con0, minus_lit0,
10434 MINUS_EXPR, type);
10435 return
10436 fold_convert_loc (loc, type,
10437 associate_trees (loc, var0, con0,
10438 PLUS_EXPR, type));
10439 }
10440 }
10441
10442 con0 = associate_trees (loc, con0, lit0, code, type);
10443 return
10444 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10445 code, type));
10446 }
10447 }
10448
10449 return NULL_TREE;
10450
10451 case MINUS_EXPR:
10452 /* Pointer simplifications for subtraction, simple reassociations. */
10453 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10454 {
10455 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10456 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10457 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10458 {
10459 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10460 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10461 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10462 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10463 return fold_build2_loc (loc, PLUS_EXPR, type,
10464 fold_build2_loc (loc, MINUS_EXPR, type,
10465 arg00, arg10),
10466 fold_build2_loc (loc, MINUS_EXPR, type,
10467 arg01, arg11));
10468 }
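/* For example, (p p+ 4) - (q p+ 12) becomes
   (p - q) + (4 - 12), i.e. (p - q) - 8.  */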
10469 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10470 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10471 {
10472 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10473 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10474 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10475 fold_convert_loc (loc, type, arg1));
10476 if (tmp)
10477 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10478 }
10479 }
10480 /* A - (-B) -> A + B */
10481 if (TREE_CODE (arg1) == NEGATE_EXPR)
10482 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10483 fold_convert_loc (loc, type,
10484 TREE_OPERAND (arg1, 0)));
10485 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10486 if (TREE_CODE (arg0) == NEGATE_EXPR
10487 && (FLOAT_TYPE_P (type)
10488 || INTEGRAL_TYPE_P (type))
10489 && negate_expr_p (arg1)
10490 && reorder_operands_p (arg0, arg1))
10491 return fold_build2_loc (loc, MINUS_EXPR, type,
10492 fold_convert_loc (loc, type,
10493 negate_expr (arg1)),
10494 fold_convert_loc (loc, type,
10495 TREE_OPERAND (arg0, 0)));
10496 /* Convert -A - 1 to ~A. */
10497 if (INTEGRAL_TYPE_P (type)
10498 && TREE_CODE (arg0) == NEGATE_EXPR
10499 && integer_onep (arg1)
10500 && !TYPE_OVERFLOW_TRAPS (type))
10501 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10502 fold_convert_loc (loc, type,
10503 TREE_OPERAND (arg0, 0)));
10504
10505 /* Convert -1 - A to ~A. */
10506 if (INTEGRAL_TYPE_P (type)
10507 && integer_all_onesp (arg0))
10508 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10509
10510
10511 /* X - (X / CST) * CST is X % CST. */
10512 if (INTEGRAL_TYPE_P (type)
10513 && TREE_CODE (arg1) == MULT_EXPR
10514 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10515 && operand_equal_p (arg0,
10516 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10517 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10518 TREE_OPERAND (arg1, 1), 0))
10519 return
10520 fold_convert_loc (loc, type,
10521 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10522 arg0, TREE_OPERAND (arg1, 1)));
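/* E.g., x - (x / 16) * 16 is rewritten to x % 16; with truncating
   division and remainder this identity holds for negative x too.  */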
10523
10524 if (! FLOAT_TYPE_P (type))
10525 {
10526 if (integer_zerop (arg0))
10527 return negate_expr (fold_convert_loc (loc, type, arg1));
10528 if (integer_zerop (arg1))
10529 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10530
10531 /* Fold A - (A & B) into ~B & A. */
10532 if (!TREE_SIDE_EFFECTS (arg0)
10533 && TREE_CODE (arg1) == BIT_AND_EXPR)
10534 {
10535 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10536 {
10537 tree arg10 = fold_convert_loc (loc, type,
10538 TREE_OPERAND (arg1, 0));
10539 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10540 fold_build1_loc (loc, BIT_NOT_EXPR,
10541 type, arg10),
10542 fold_convert_loc (loc, type, arg0));
10543 }
10544 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10545 {
10546 tree arg11 = fold_convert_loc (loc,
10547 type, TREE_OPERAND (arg1, 1));
10548 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10549 fold_build1_loc (loc, BIT_NOT_EXPR,
10550 type, arg11),
10551 fold_convert_loc (loc, type, arg0));
10552 }
10553 }
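/* This is a bitwise identity: every set bit of A & B is also set
   in A, so the subtraction borrows nothing and simply clears those
   bits, i.e. yields A & ~B.  */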
10554
10555 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10556 any power of 2 minus 1. */
10557 if (TREE_CODE (arg0) == BIT_AND_EXPR
10558 && TREE_CODE (arg1) == BIT_AND_EXPR
10559 && operand_equal_p (TREE_OPERAND (arg0, 0),
10560 TREE_OPERAND (arg1, 0), 0))
10561 {
10562 tree mask0 = TREE_OPERAND (arg0, 1);
10563 tree mask1 = TREE_OPERAND (arg1, 1);
10564 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10565
10566 if (operand_equal_p (tem, mask1, 0))
10567 {
10568 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10569 TREE_OPERAND (arg0, 0), mask1);
10570 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10571 }
10572 }
10573 }
10574
10575 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10576 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10577 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10578
10579 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10580 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10581 (-ARG1 + ARG0) reduces to -ARG1. */
10582 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10583 return negate_expr (fold_convert_loc (loc, type, arg1));
10584
10585 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10586 __complex__ ( x, -y ). This is not the same for SNaNs or if
10587 signed zeros are involved. */
10588 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10589 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10590 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10591 {
10592 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10593 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10594 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10595 bool arg0rz = false, arg0iz = false;
10596 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10597 || (arg0i && (arg0iz = real_zerop (arg0i))))
10598 {
10599 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10600 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10601 if (arg0rz && arg1i && real_zerop (arg1i))
10602 {
10603 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10604 arg1r ? arg1r
10605 : build1 (REALPART_EXPR, rtype, arg1));
10606 tree ip = arg0i ? arg0i
10607 : build1 (IMAGPART_EXPR, rtype, arg0);
10608 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10609 }
10610 else if (arg0iz && arg1r && real_zerop (arg1r))
10611 {
10612 tree rp = arg0r ? arg0r
10613 : build1 (REALPART_EXPR, rtype, arg0);
10614 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10615 arg1i ? arg1i
10616 : build1 (IMAGPART_EXPR, rtype, arg1));
10617 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10618 }
10619 }
10620 }
10621
10622 /* Fold &x - &x. This can happen from &x.foo - &x.
10623 This is unsafe for certain floats even in non-IEEE formats.
10624 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10625 Also note that operand_equal_p is always false if an operand
10626 is volatile. */
10627
10628 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10629 && operand_equal_p (arg0, arg1, 0))
10630 return build_zero_cst (type);
10631
10632 /* A - B -> A + (-B) if B is easily negatable. */
10633 if (negate_expr_p (arg1)
10634 && ((FLOAT_TYPE_P (type)
10635 /* Avoid this transformation if B is a positive REAL_CST. */
10636 && (TREE_CODE (arg1) != REAL_CST
10637 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10638 || INTEGRAL_TYPE_P (type)))
10639 return fold_build2_loc (loc, PLUS_EXPR, type,
10640 fold_convert_loc (loc, type, arg0),
10641 fold_convert_loc (loc, type,
10642 negate_expr (arg1)));
10643
10644 /* Try folding difference of addresses. */
10645 {
10646 HOST_WIDE_INT diff;
10647
10648 if ((TREE_CODE (arg0) == ADDR_EXPR
10649 || TREE_CODE (arg1) == ADDR_EXPR)
10650 && ptr_difference_const (arg0, arg1, &diff))
10651 return build_int_cst_type (type, diff);
10652 }
10653
10654 /* Fold &a[i] - &a[j] to i-j. */
10655 if (TREE_CODE (arg0) == ADDR_EXPR
10656 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10657 && TREE_CODE (arg1) == ADDR_EXPR
10658 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10659 {
10660 tree tem = fold_addr_of_array_ref_difference (loc, type,
10661 TREE_OPERAND (arg0, 0),
10662 TREE_OPERAND (arg1, 0));
10663 if (tem)
10664 return tem;
10665 }
10666
10667 if (FLOAT_TYPE_P (type)
10668 && flag_unsafe_math_optimizations
10669 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10670 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10671 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10672 return tem;
10673
10674 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10675 same or one. Make sure type is not saturating.
10676 fold_plusminus_mult_expr will re-associate. */
10677 if ((TREE_CODE (arg0) == MULT_EXPR
10678 || TREE_CODE (arg1) == MULT_EXPR)
10679 && !TYPE_SATURATING (type)
10680 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10681 {
10682 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10683 if (tem)
10684 return tem;
10685 }
10686
10687 goto associate;
10688
10689 case MULT_EXPR:
10690 /* (-A) * (-B) -> A * B */
10691 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10692 return fold_build2_loc (loc, MULT_EXPR, type,
10693 fold_convert_loc (loc, type,
10694 TREE_OPERAND (arg0, 0)),
10695 fold_convert_loc (loc, type,
10696 negate_expr (arg1)));
10697 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10698 return fold_build2_loc (loc, MULT_EXPR, type,
10699 fold_convert_loc (loc, type,
10700 negate_expr (arg0)),
10701 fold_convert_loc (loc, type,
10702 TREE_OPERAND (arg1, 0)));
10703
10704 if (! FLOAT_TYPE_P (type))
10705 {
10706 if (integer_zerop (arg1))
10707 return omit_one_operand_loc (loc, type, arg1, arg0);
10708 if (integer_onep (arg1))
10709 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10710 /* Transform x * -1 into -x. Make sure to do the negation
10711 on the original operand with conversions not stripped
10712 because we can only strip non-sign-changing conversions. */
10713 if (integer_all_onesp (arg1))
10714 return fold_convert_loc (loc, type, negate_expr (op0));
10715 /* Transform x * -C into -x * C if x is easily negatable. */
10716 if (TREE_CODE (arg1) == INTEGER_CST
10717 && tree_int_cst_sgn (arg1) == -1
10718 && negate_expr_p (arg0)
10719 && (tem = negate_expr (arg1)) != arg1
10720 && !TREE_OVERFLOW (tem))
10721 return fold_build2_loc (loc, MULT_EXPR, type,
10722 fold_convert_loc (loc, type,
10723 negate_expr (arg0)),
10724 tem);
10725
10726 /* (a * (1 << b)) is (a << b) */
10727 if (TREE_CODE (arg1) == LSHIFT_EXPR
10728 && integer_onep (TREE_OPERAND (arg1, 0)))
10729 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10730 TREE_OPERAND (arg1, 1));
10731 if (TREE_CODE (arg0) == LSHIFT_EXPR
10732 && integer_onep (TREE_OPERAND (arg0, 0)))
10733 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10734 TREE_OPERAND (arg0, 1));
10735
10736 /* (A + A) * C -> A * 2 * C */
10737 if (TREE_CODE (arg0) == PLUS_EXPR
10738 && TREE_CODE (arg1) == INTEGER_CST
10739 && operand_equal_p (TREE_OPERAND (arg0, 0),
10740 TREE_OPERAND (arg0, 1), 0))
10741 return fold_build2_loc (loc, MULT_EXPR, type,
10742 omit_one_operand_loc (loc, type,
10743 TREE_OPERAND (arg0, 0),
10744 TREE_OPERAND (arg0, 1)),
10745 fold_build2_loc (loc, MULT_EXPR, type,
10746 build_int_cst (type, 2), arg1));
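/* E.g., (x + x) * 5 becomes x * 10 once the inner
   MULT_EXPR 2 * 5 is constant-folded.  */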
10747
10748 strict_overflow_p = false;
10749 if (TREE_CODE (arg1) == INTEGER_CST
10750 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10751 &strict_overflow_p)))
10752 {
10753 if (strict_overflow_p)
10754 fold_overflow_warning (("assuming signed overflow does not "
10755 "occur when simplifying "
10756 "multiplication"),
10757 WARN_STRICT_OVERFLOW_MISC);
10758 return fold_convert_loc (loc, type, tem);
10759 }
10760
10761 /* Optimize z * conj(z) for integer complex numbers. */
10762 if (TREE_CODE (arg0) == CONJ_EXPR
10763 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10764 return fold_mult_zconjz (loc, type, arg1);
10765 if (TREE_CODE (arg1) == CONJ_EXPR
10766 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10767 return fold_mult_zconjz (loc, type, arg0);
10768 }
10769 else
10770 {
10771 /* Maybe fold x * 0 to 0. The expressions aren't the same
10772 when x is NaN, since x * 0 is also NaN. Nor are they the
10773 same in modes with signed zeros, since multiplying a
10774 negative value by 0 gives -0, not +0. */
10775 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10776 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10777 && real_zerop (arg1))
10778 return omit_one_operand_loc (loc, type, arg1, arg0);
10779 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10780 Likewise for complex arithmetic with signed zeros. */
10781 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10782 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10783 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10784 && real_onep (arg1))
10785 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10786
10787 /* Transform x * -1.0 into -x. */
10788 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10789 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10790 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10791 && real_minus_onep (arg1))
10792 return fold_convert_loc (loc, type, negate_expr (arg0));
10793
10794 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10795 the result for floating point types due to rounding, so it is applied
10796 only if -fassociative-math was specified. */
10797 if (flag_associative_math
10798 && TREE_CODE (arg0) == RDIV_EXPR
10799 && TREE_CODE (arg1) == REAL_CST
10800 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10801 {
10802 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10803 arg1);
10804 if (tem)
10805 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10806 TREE_OPERAND (arg0, 1));
10807 }
10808
10809 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10810 if (operand_equal_p (arg0, arg1, 0))
10811 {
10812 tree tem = fold_strip_sign_ops (arg0);
10813 if (tem != NULL_TREE)
10814 {
10815 tem = fold_convert_loc (loc, type, tem);
10816 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10817 }
10818 }
10819
10820 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10821 This is not the same for NaNs or if signed zeros are
10822 involved. */
10823 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10824 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10825 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10826 && TREE_CODE (arg1) == COMPLEX_CST
10827 && real_zerop (TREE_REALPART (arg1)))
10828 {
10829 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10830 if (real_onep (TREE_IMAGPART (arg1)))
10831 return
10832 fold_build2_loc (loc, COMPLEX_EXPR, type,
10833 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10834 rtype, arg0)),
10835 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10836 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10837 return
10838 fold_build2_loc (loc, COMPLEX_EXPR, type,
10839 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10840 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10841 rtype, arg0)));
10842 }
10843
10844 /* Optimize z * conj(z) for floating point complex numbers.
10845 Guarded by flag_unsafe_math_optimizations as non-finite
10846 imaginary components don't produce scalar results. */
10847 if (flag_unsafe_math_optimizations
10848 && TREE_CODE (arg0) == CONJ_EXPR
10849 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10850 return fold_mult_zconjz (loc, type, arg1);
10851 if (flag_unsafe_math_optimizations
10852 && TREE_CODE (arg1) == CONJ_EXPR
10853 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10854 return fold_mult_zconjz (loc, type, arg0);
10855
10856 if (flag_unsafe_math_optimizations)
10857 {
10858 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10859 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10860
10861 /* Optimizations of root(...)*root(...). */
10862 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10863 {
10864 tree rootfn, arg;
10865 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10866 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10867
10868 /* Optimize sqrt(x)*sqrt(x) as x. */
10869 if (BUILTIN_SQRT_P (fcode0)
10870 && operand_equal_p (arg00, arg10, 0)
10871 && ! HONOR_SNANS (TYPE_MODE (type)))
10872 return arg00;
10873
10874 /* Optimize root(x)*root(y) as root(x*y). */
10875 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10876 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10877 return build_call_expr_loc (loc, rootfn, 1, arg);
10878 }
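/* The root(x)*root(y) case needs -funsafe-math-optimizations:
   e.g. sqrt(-1.0) * sqrt(-1.0) is NaN, while
   sqrt(-1.0 * -1.0) is 1.0.  */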
10879
10880 /* Optimize expN(x)*expN(y) as expN(x+y). */
10881 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10882 {
10883 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10884 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10885 CALL_EXPR_ARG (arg0, 0),
10886 CALL_EXPR_ARG (arg1, 0));
10887 return build_call_expr_loc (loc, expfn, 1, arg);
10888 }
10889
10890 /* Optimizations of pow(...)*pow(...). */
10891 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10892 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10893 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10894 {
10895 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10896 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10897 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10898 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10899
10900 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10901 if (operand_equal_p (arg01, arg11, 0))
10902 {
10903 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10904 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10905 arg00, arg10);
10906 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10907 }
10908
10909 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10910 if (operand_equal_p (arg00, arg10, 0))
10911 {
10912 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10913 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10914 arg01, arg11);
10915 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10916 }
10917 }
10918
10919 /* Optimize tan(x)*cos(x) as sin(x). */
10920 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10921 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10922 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10923 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10924 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10925 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10926 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10927 CALL_EXPR_ARG (arg1, 0), 0))
10928 {
10929 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10930
10931 if (sinfn != NULL_TREE)
10932 return build_call_expr_loc (loc, sinfn, 1,
10933 CALL_EXPR_ARG (arg0, 0));
10934 }
10935
10936 /* Optimize x*pow(x,c) as pow(x,c+1). */
10937 if (fcode1 == BUILT_IN_POW
10938 || fcode1 == BUILT_IN_POWF
10939 || fcode1 == BUILT_IN_POWL)
10940 {
10941 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10942 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10943 if (TREE_CODE (arg11) == REAL_CST
10944 && !TREE_OVERFLOW (arg11)
10945 && operand_equal_p (arg0, arg10, 0))
10946 {
10947 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10948 REAL_VALUE_TYPE c;
10949 tree arg;
10950
10951 c = TREE_REAL_CST (arg11);
10952 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10953 arg = build_real (type, c);
10954 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10955 }
10956 }
10957
10958 /* Optimize pow(x,c)*x as pow(x,c+1). */
10959 if (fcode0 == BUILT_IN_POW
10960 || fcode0 == BUILT_IN_POWF
10961 || fcode0 == BUILT_IN_POWL)
10962 {
10963 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10964 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10965 if (TREE_CODE (arg01) == REAL_CST
10966 && !TREE_OVERFLOW (arg01)
10967 && operand_equal_p (arg1, arg00, 0))
10968 {
10969 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10970 REAL_VALUE_TYPE c;
10971 tree arg;
10972
10973 c = TREE_REAL_CST (arg01);
10974 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10975 arg = build_real (type, c);
10976 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10977 }
10978 }
10979
10980 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10981 if (!in_gimple_form
10982 && optimize
10983 && operand_equal_p (arg0, arg1, 0))
10984 {
10985 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10986
10987 if (powfn)
10988 {
10989 tree arg = build_real (type, dconst2);
10990 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10991 }
10992 }
10993 }
10994 }
10995 goto associate;
10996
10997 case BIT_IOR_EXPR:
10998 bit_ior:
10999 if (integer_all_onesp (arg1))
11000 return omit_one_operand_loc (loc, type, arg1, arg0);
11001 if (integer_zerop (arg1))
11002 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11003 if (operand_equal_p (arg0, arg1, 0))
11004 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11005
11006 /* ~X | X is -1. */
11007 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11008 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11009 {
11010 t1 = build_zero_cst (type);
11011 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11012 return omit_one_operand_loc (loc, type, t1, arg1);
11013 }
11014
11015 /* X | ~X is -1. */
11016 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11017 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11018 {
11019 t1 = build_zero_cst (type);
11020 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11021 return omit_one_operand_loc (loc, type, t1, arg0);
11022 }
11023
11024 /* Canonicalize (X & C1) | C2. */
11025 if (TREE_CODE (arg0) == BIT_AND_EXPR
11026 && TREE_CODE (arg1) == INTEGER_CST
11027 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11028 {
11029 double_int c1, c2, c3, msk;
11030 int width = TYPE_PRECISION (type), w;
11031 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11032 c2 = tree_to_double_int (arg1);
11033
11034 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11035 if (double_int_equal_p (double_int_and (c1, c2), c1))
11036 return omit_one_operand_loc (loc, type, arg1,
11037 TREE_OPERAND (arg0, 0));
11038
11039 msk = double_int_mask (width);
11040
11041 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11042 if (double_int_zero_p (double_int_and_not (msk,
11043 double_int_ior (c1, c2))))
11044 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11045 TREE_OPERAND (arg0, 0), arg1);
11046
11047 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11048 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11049 mode which allows further optimizations. */
11050 c1 = double_int_and (c1, msk);
11051 c2 = double_int_and (c2, msk);
11052 c3 = double_int_and_not (c1, c2);
11053 for (w = BITS_PER_UNIT;
11054 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11055 w <<= 1)
11056 {
11057 unsigned HOST_WIDE_INT mask
11058 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11059 if (((c1.low | c2.low) & mask) == mask
11060 && (c1.low & ~mask) == 0 && c1.high == 0)
11061 {
11062 c3 = uhwi_to_double_int (mask);
11063 break;
11064 }
11065 }
11066 if (!double_int_equal_p (c3, c1))
11067 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11068 fold_build2_loc (loc, BIT_AND_EXPR, type,
11069 TREE_OPERAND (arg0, 0),
11070 double_int_to_tree (type,
11071 c3)),
11072 arg1);
11073 }
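/* Worked example: (x & 0xF0) | 0x3C. Neither early exit applies,
   and minimizing C1 gives C1 & ~C2 == 0xC0, so the expression is
   canonicalized to (x & 0xC0) | 0x3C.  */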
11074
11075 /* (X & Y) | Y is (X, Y). */
11076 if (TREE_CODE (arg0) == BIT_AND_EXPR
11077 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11078 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11079 /* (X & Y) | X is (Y, X). */
11080 if (TREE_CODE (arg0) == BIT_AND_EXPR
11081 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11082 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11083 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11084 /* X | (X & Y) is (Y, X). */
11085 if (TREE_CODE (arg1) == BIT_AND_EXPR
11086 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11087 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11088 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11089 /* X | (Y & X) is (Y, X). */
11090 if (TREE_CODE (arg1) == BIT_AND_EXPR
11091 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11092 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11093 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11094
11095 /* (X & ~Y) | (~X & Y) is X ^ Y */
11096 if (TREE_CODE (arg0) == BIT_AND_EXPR
11097 && TREE_CODE (arg1) == BIT_AND_EXPR)
11098 {
11099 tree a0, a1, l0, l1, n0, n1;
11100
11101 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11102 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11103
11104 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11105 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11106
11107 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11108 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11109
11110 if ((operand_equal_p (n0, a0, 0)
11111 && operand_equal_p (n1, a1, 0))
11112 || (operand_equal_p (n0, a1, 0)
11113 && operand_equal_p (n1, a0, 0)))
11114 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11115 }
11116
11117 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11118 if (t1 != NULL_TREE)
11119 return t1;
11120
11121 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11122
11123 This results in more efficient code for machines without a NAND
11124 instruction. Combine will canonicalize to the first form
11125 which will allow use of NAND instructions provided by the
11126 backend if they exist. */
11127 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11128 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11129 {
11130 return
11131 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11132 build2 (BIT_AND_EXPR, type,
11133 fold_convert_loc (loc, type,
11134 TREE_OPERAND (arg0, 0)),
11135 fold_convert_loc (loc, type,
11136 TREE_OPERAND (arg1, 0))));
11137 }
11138
11139 /* See if this can be simplified into a rotate first. If that
11140 is unsuccessful, continue in the association code. */
11141 goto bit_rotate;
11142
11143 case BIT_XOR_EXPR:
11144 if (integer_zerop (arg1))
11145 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11146 if (integer_all_onesp (arg1))
11147 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11148 if (operand_equal_p (arg0, arg1, 0))
11149 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11150
11151 /* ~X ^ X is -1. */
11152 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11153 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11154 {
11155 t1 = build_zero_cst (type);
11156 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11157 return omit_one_operand_loc (loc, type, t1, arg1);
11158 }
11159
11160 /* X ^ ~X is -1. */
11161 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11162 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11163 {
11164 t1 = build_zero_cst (type);
11165 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11166 return omit_one_operand_loc (loc, type, t1, arg0);
11167 }
11168
11169 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11170 with a constant, and the two constants have no bits in common,
11171 we should treat this as a BIT_IOR_EXPR since this may produce more
11172 simplifications. */
11173 if (TREE_CODE (arg0) == BIT_AND_EXPR
11174 && TREE_CODE (arg1) == BIT_AND_EXPR
11175 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11176 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11177 && integer_zerop (const_binop (BIT_AND_EXPR,
11178 TREE_OPERAND (arg0, 1),
11179 TREE_OPERAND (arg1, 1))))
11180 {
11181 code = BIT_IOR_EXPR;
11182 goto bit_ior;
11183 }
11184
11185 /* (X | Y) ^ X -> Y & ~X. */
11186 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11187 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11188 {
11189 tree t2 = TREE_OPERAND (arg0, 1);
11190 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11191 arg1);
11192 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11193 fold_convert_loc (loc, type, t2),
11194 fold_convert_loc (loc, type, t1));
11195 return t1;
11196 }
11197
11198 /* (Y | X) ^ X -> Y & ~X. */
11199 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11200 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11201 {
11202 tree t2 = TREE_OPERAND (arg0, 0);
11203 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11204 arg1);
11205 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11206 fold_convert_loc (loc, type, t2),
11207 fold_convert_loc (loc, type, t1));
11208 return t1;
11209 }
11210
11211 /* X ^ (X | Y) -> Y & ~X. */
11212 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11213 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11214 {
11215 tree t2 = TREE_OPERAND (arg1, 1);
11216 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11217 arg0);
11218 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11219 fold_convert_loc (loc, type, t2),
11220 fold_convert_loc (loc, type, t1));
11221 return t1;
11222 }
11223
11224 /* X ^ (Y | X) -> Y & ~X. */
11225 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11226 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11227 {
11228 tree t2 = TREE_OPERAND (arg1, 0);
11229 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11230 arg0);
11231 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11232 fold_convert_loc (loc, type, t2),
11233 fold_convert_loc (loc, type, t1));
11234 return t1;
11235 }
11236
11237 /* Convert ~X ^ ~Y to X ^ Y. */
11238 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11239 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11240 return fold_build2_loc (loc, code, type,
11241 fold_convert_loc (loc, type,
11242 TREE_OPERAND (arg0, 0)),
11243 fold_convert_loc (loc, type,
11244 TREE_OPERAND (arg1, 0)));
11245
11246 /* Convert ~X ^ C to X ^ ~C. */
11247 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11248 && TREE_CODE (arg1) == INTEGER_CST)
11249 return fold_build2_loc (loc, code, type,
11250 fold_convert_loc (loc, type,
11251 TREE_OPERAND (arg0, 0)),
11252 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11253
11254 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11255 if (TREE_CODE (arg0) == BIT_AND_EXPR
11256 && integer_onep (TREE_OPERAND (arg0, 1))
11257 && integer_onep (arg1))
11258 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11259 build_int_cst (TREE_TYPE (arg0), 0));
11260
11261 /* Fold (X & Y) ^ Y as ~X & Y. */
11262 if (TREE_CODE (arg0) == BIT_AND_EXPR
11263 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11264 {
11265 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11266 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11267 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11268 fold_convert_loc (loc, type, arg1));
11269 }
11270 /* Fold (X & Y) ^ X as ~Y & X. */
11271 if (TREE_CODE (arg0) == BIT_AND_EXPR
11272 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11273 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11274 {
11275 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11276 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11277 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11278 fold_convert_loc (loc, type, arg1));
11279 }
11280 /* Fold X ^ (X & Y) as X & ~Y. */
11281 if (TREE_CODE (arg1) == BIT_AND_EXPR
11282 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11283 {
11284 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11285 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11286 fold_convert_loc (loc, type, arg0),
11287 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11288 }
11289 /* Fold X ^ (Y & X) as ~Y & X. */
11290 if (TREE_CODE (arg1) == BIT_AND_EXPR
11291 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11292 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11293 {
11294 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11295 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11296 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11297 fold_convert_loc (loc, type, arg0));
11298 }
11299
11300 /* See if this can be simplified into a rotate first. If that
11301 is unsuccessful, continue in the association code. */
11302 goto bit_rotate;
11303
11304 case BIT_AND_EXPR:
11305 if (integer_all_onesp (arg1))
11306 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11307 if (integer_zerop (arg1))
11308 return omit_one_operand_loc (loc, type, arg1, arg0);
11309 if (operand_equal_p (arg0, arg1, 0))
11310 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11311
11312 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11313 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11314 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11315 || (TREE_CODE (arg0) == EQ_EXPR
11316 && integer_zerop (TREE_OPERAND (arg0, 1))))
11317 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11318 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11319
11320 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11321 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11322 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11323 || (TREE_CODE (arg1) == EQ_EXPR
11324 && integer_zerop (TREE_OPERAND (arg1, 1))))
11325 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11326 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11327
11328 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11329 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11330 && TREE_CODE (arg1) == INTEGER_CST
11331 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11332 {
11333 tree tmp1 = fold_convert_loc (loc, type, arg1);
11334 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11335 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11336 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11337 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11338 return
11339 fold_convert_loc (loc, type,
11340 fold_build2_loc (loc, BIT_IOR_EXPR,
11341 type, tmp2, tmp3));
11342 }
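/* This is plain distributivity; e.g. (x | 0x0F) & 0xFC becomes
   (x & 0xFC) | 0x0C.  */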
11343
11344 /* (X | Y) & Y is (X, Y). */
11345 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11346 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11347 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11348 /* (X | Y) & X is (Y, X). */
11349 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11350 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11351 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11352 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11353 /* X & (X | Y) is (Y, X). */
11354 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11355 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11356 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11357 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11358 /* X & (Y | X) is (Y, X). */
11359 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11360 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11361 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11362 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11363
11364 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11365 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11366 && integer_onep (TREE_OPERAND (arg0, 1))
11367 && integer_onep (arg1))
11368 {
11369 tem = TREE_OPERAND (arg0, 0);
11370 return fold_build2_loc (loc, EQ_EXPR, type,
11371 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11372 build_int_cst (TREE_TYPE (tem), 1)),
11373 build_int_cst (TREE_TYPE (tem), 0));
11374 }
11375 /* Fold ~X & 1 as (X & 1) == 0. */
11376 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11377 && integer_onep (arg1))
11378 {
11379 tem = TREE_OPERAND (arg0, 0);
11380 return fold_build2_loc (loc, EQ_EXPR, type,
11381 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11382 build_int_cst (TREE_TYPE (tem), 1)),
11383 build_int_cst (TREE_TYPE (tem), 0));
11384 }
11385 /* Fold !X & 1 as X == 0. */
11386 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11387 && integer_onep (arg1))
11388 {
11389 tem = TREE_OPERAND (arg0, 0);
11390 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11391 build_int_cst (TREE_TYPE (tem), 0));
11392 }
11393
11394 /* Fold (X ^ Y) & Y as ~X & Y. */
11395 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11396 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11397 {
11398 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11399 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11400 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11401 fold_convert_loc (loc, type, arg1));
11402 }
11403 /* Fold (X ^ Y) & X as ~Y & X. */
11404 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11405 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11406 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11407 {
11408 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11409 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11410 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11411 fold_convert_loc (loc, type, arg1));
11412 }
11413 /* Fold X & (X ^ Y) as X & ~Y. */
11414 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11415 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11416 {
11417 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11418 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11419 fold_convert_loc (loc, type, arg0),
11420 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11421 }
11422 /* Fold X & (Y ^ X) as ~Y & X. */
11423 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11424 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11425 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11426 {
11427 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11428 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11429 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11430 fold_convert_loc (loc, type, arg0));
11431 }
11432
11433 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11434 multiple of 1 << CST. */
11435 if (TREE_CODE (arg1) == INTEGER_CST)
11436 {
11437 double_int cst1 = tree_to_double_int (arg1);
11438 double_int ncst1 = double_int_ext (double_int_neg (cst1),
11439 TYPE_PRECISION (TREE_TYPE (arg1)),
11440 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11441 if (double_int_equal_p (double_int_and (cst1, ncst1), ncst1)
11442 && multiple_of_p (type, arg0,
11443 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11444 return fold_convert_loc (loc, type, arg0);
11445 }
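/* E.g., (x * 8) & -8 folds to x * 8: the product is a multiple
   of 8, so its low three bits are already zero and the mask
   -8 == ~7 changes nothing.  */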
11446
11447 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11448 ((A & N) + B) & M -> (A + B) & M.
11449 Similarly if (N & M) == 0,
11450 ((A | N) + B) & M -> (A + B) & M,
11451 and likewise for - instead of + (or unary - instead of +)
11452 and/or ^ instead of |.
11453 If B is constant and (B & M) == 0, fold into A & M. */
11454 if (host_integerp (arg1, 1))
11455 {
11456 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11457 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11458 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11459 && (TREE_CODE (arg0) == PLUS_EXPR
11460 || TREE_CODE (arg0) == MINUS_EXPR
11461 || TREE_CODE (arg0) == NEGATE_EXPR)
11462 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11463 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11464 {
11465 tree pmop[2];
11466 int which = 0;
11467 unsigned HOST_WIDE_INT cst0;
11468
11469 /* Now we know that arg0 is (C + D) or (C - D) or
11470 -C and arg1 (M) is == (1LL << cst) - 1.
11471 Store C into PMOP[0] and D into PMOP[1]. */
11472 pmop[0] = TREE_OPERAND (arg0, 0);
11473 pmop[1] = NULL;
11474 if (TREE_CODE (arg0) != NEGATE_EXPR)
11475 {
11476 pmop[1] = TREE_OPERAND (arg0, 1);
11477 which = 1;
11478 }
11479
11480 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11481 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11482 & cst1) != cst1)
11483 which = -1;
11484
11485 for (; which >= 0; which--)
11486 switch (TREE_CODE (pmop[which]))
11487 {
11488 case BIT_AND_EXPR:
11489 case BIT_IOR_EXPR:
11490 case BIT_XOR_EXPR:
11491 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11492 != INTEGER_CST)
11493 break;
11494 /* tree_low_cst not used, because we don't care about
11495 the upper bits. */
11496 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11497 cst0 &= cst1;
11498 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11499 {
11500 if (cst0 != cst1)
11501 break;
11502 }
11503 else if (cst0 != 0)
11504 break;
11505 /* If C or D is of the form (A & N) where
11506 (N & M) == M, or of the form (A | N) or
11507 (A ^ N) where (N & M) == 0, replace it with A. */
11508 pmop[which] = TREE_OPERAND (pmop[which], 0);
11509 break;
11510 case INTEGER_CST:
11511 /* If C or D is an N where (N & M) == 0, it can be
11512 omitted (assumed 0). */
11513 if ((TREE_CODE (arg0) == PLUS_EXPR
11514 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11515 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11516 pmop[which] = NULL;
11517 break;
11518 default:
11519 break;
11520 }
11521
11522 /* Only build anything new if we optimized one or both arguments
11523 above. */
11524 if (pmop[0] != TREE_OPERAND (arg0, 0)
11525 || (TREE_CODE (arg0) != NEGATE_EXPR
11526 && pmop[1] != TREE_OPERAND (arg0, 1)))
11527 {
11528 tree utype = TREE_TYPE (arg0);
11529 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11530 {
11531 /* Perform the operations in a type that has defined
11532 overflow behavior. */
11533 utype = unsigned_type_for (TREE_TYPE (arg0));
11534 if (pmop[0] != NULL)
11535 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11536 if (pmop[1] != NULL)
11537 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11538 }
11539
11540 if (TREE_CODE (arg0) == NEGATE_EXPR)
11541 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11542 else if (TREE_CODE (arg0) == PLUS_EXPR)
11543 {
11544 if (pmop[0] != NULL && pmop[1] != NULL)
11545 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11546 pmop[0], pmop[1]);
11547 else if (pmop[0] != NULL)
11548 tem = pmop[0];
11549 else if (pmop[1] != NULL)
11550 tem = pmop[1];
11551 else
11552 return build_int_cst (type, 0);
11553 }
11554 else if (pmop[0] == NULL)
11555 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11556 else
11557 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11558 pmop[0], pmop[1]);
11559 /* TEM is now the new binary +, - or unary - replacement. */
11560 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11561 fold_convert_loc (loc, utype, arg1));
11562 return fold_convert_loc (loc, type, tem);
11563 }
11564 }
11565 }
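/* E.g., with M == 0xFF, ((a & 0x1FF) + b) & 0xFF becomes
   (a + b) & 0xFF, since 0x1FF & 0xFF == 0xFF; the arithmetic is
   performed in an unsigned type to keep overflow well defined.  */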
11566
11567 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11568 if (t1 != NULL_TREE)
11569 return t1;
11570 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11571 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11572 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11573 {
11574 unsigned int prec
11575 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11576
11577 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11578 && (~TREE_INT_CST_LOW (arg1)
11579 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11580 return
11581 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11582 }
11583
11584 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11585
11586 This results in more efficient code for machines without a NOR
11587 instruction. Combine will canonicalize to the first form
11588 which will allow use of NOR instructions provided by the
11589 backend if they exist. */
11590 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11591 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11592 {
11593 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11594 build2 (BIT_IOR_EXPR, type,
11595 fold_convert_loc (loc, type,
11596 TREE_OPERAND (arg0, 0)),
11597 fold_convert_loc (loc, type,
11598 TREE_OPERAND (arg1, 0))));
11599 }
11600
11601 /* If arg0 is derived from the address of an object or function, we may
11602 be able to fold this expression using the object or function's
11603 alignment. */
11604 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11605 {
11606 unsigned HOST_WIDE_INT modulus, residue;
11607 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11608
11609 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11610 integer_onep (arg1));
11611
11612 /* This works because modulus is a power of 2. If this weren't the
11613 case, we'd have to replace it by its greatest power-of-2
11614 divisor: modulus & -modulus. */
11615 if (low < modulus)
11616 return build_int_cst (type, residue & low);
11617 }
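/* E.g., if get_pointer_modulus_and_residue determines that arg0
   points to a 4-byte-aligned object (modulus 4, residue 0), then
   arg0 & 3 folds to 0.  */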
11618
11619 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1)),
11620 and (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1)),
11621 if the new mask might be further optimized. */
11622 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11623 || TREE_CODE (arg0) == RSHIFT_EXPR)
11624 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11625 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11626 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11627 < TYPE_PRECISION (TREE_TYPE (arg0))
11628 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11629 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11630 {
11631 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11632 unsigned HOST_WIDE_INT mask
11633 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11634 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11635 tree shift_type = TREE_TYPE (arg0);
11636
11637 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11638 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11639 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11640 && TYPE_PRECISION (TREE_TYPE (arg0))
11641 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11642 {
11643 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11644 tree arg00 = TREE_OPERAND (arg0, 0);
11645 /* See if more bits can be proven as zero because of
11646 zero extension. */
11647 if (TREE_CODE (arg00) == NOP_EXPR
11648 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11649 {
11650 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11651 if (TYPE_PRECISION (inner_type)
11652 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11653 && TYPE_PRECISION (inner_type) < prec)
11654 {
11655 prec = TYPE_PRECISION (inner_type);
11656 /* See if we can shorten the right shift. */
11657 if (shiftc < prec)
11658 shift_type = inner_type;
11659 }
11660 }
11661 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11662 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11663 zerobits <<= prec - shiftc;
11664 /* For an arithmetic shift, if the sign bit could be set, zerobits
11665 can actually contain sign bits, so no transformation is
11666 possible unless MASK masks them all away. In that
11667 case the shift needs to be converted into a logical shift. */
11668 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11669 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11670 {
11671 if ((mask & zerobits) == 0)
11672 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11673 else
11674 zerobits = 0;
11675 }
11676 }
11677
11678 /* ((X << 16) & 0xff00) is (X, 0). */
11679 if ((mask & zerobits) == mask)
11680 return omit_one_operand_loc (loc, type,
11681 build_int_cst (type, 0), arg0);
11682
11683 newmask = mask | zerobits;
11684 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11685 {
11686 unsigned int prec;
11687
11688 /* Only do the transformation if NEWMASK is some integer
11689 mode's mask. */
11690 for (prec = BITS_PER_UNIT;
11691 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11692 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11693 break;
11694 if (prec < HOST_BITS_PER_WIDE_INT
11695 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11696 {
11697 tree newmaskt;
11698
11699 if (shift_type != TREE_TYPE (arg0))
11700 {
11701 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11702 fold_convert_loc (loc, shift_type,
11703 TREE_OPERAND (arg0, 0)),
11704 TREE_OPERAND (arg0, 1));
11705 tem = fold_convert_loc (loc, type, tem);
11706 }
11707 else
11708 tem = op0;
11709 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11710 if (!tree_int_cst_equal (newmaskt, arg1))
11711 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11712 }
11713 }
11714 }
11715
11716 goto associate;
11717
11718 case RDIV_EXPR:
11719 /* Don't touch a floating-point divide by zero unless the mode
11720 of the constant can represent infinity. */
11721 if (TREE_CODE (arg1) == REAL_CST
11722 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11723 && real_zerop (arg1))
11724 return NULL_TREE;
11725
11726 /* Optimize A / A to 1.0 if we don't care about
11727 NaNs or Infinities. Skip the transformation
11728 for non-real operands. */
11729 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11730 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11731 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11732 && operand_equal_p (arg0, arg1, 0))
11733 {
11734 tree r = build_real (TREE_TYPE (arg0), dconst1);
11735
11736 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11737 }
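/* The guards are needed because NaN / NaN is NaN and
   Inf / Inf is NaN, not 1.0.  */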
11738
11739 /* The complex version of the above A / A optimization. */
11740 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11741 && operand_equal_p (arg0, arg1, 0))
11742 {
11743 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11744 if (! HONOR_NANS (TYPE_MODE (elem_type))
11745 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11746 {
11747 tree r = build_real (elem_type, dconst1);
11748 /* omit_two_operands will call fold_convert for us. */
11749 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11750 }
11751 }
11752
11753 /* (-A) / (-B) -> A / B */
11754 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11755 return fold_build2_loc (loc, RDIV_EXPR, type,
11756 TREE_OPERAND (arg0, 0),
11757 negate_expr (arg1));
11758 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11759 return fold_build2_loc (loc, RDIV_EXPR, type,
11760 negate_expr (arg0),
11761 TREE_OPERAND (arg1, 0));
11762
11763 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11764 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11765 && real_onep (arg1))
11766 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11767
11768 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11769 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11770 && real_minus_onep (arg1))
11771 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11772 negate_expr (arg0)));
11773
11774 /* If ARG1 is a constant, we can convert this to a multiply by the
11775 reciprocal. This does not have the same rounding properties,
11776 so only do this if -freciprocal-math. We can actually
11777 always safely do it if ARG1 is a power of two, but it's hard to
11778 tell if it is or not in a portable manner. */
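      /* E.g. x / 2.0 becomes x * 0.5 whenever the reciprocal is exact,
	 while x / 3.0 becomes x * (1.0 / 3.0) only under
	 -freciprocal-math.  */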
11779 if (optimize
11780 && (TREE_CODE (arg1) == REAL_CST
11781 || (TREE_CODE (arg1) == COMPLEX_CST
11782 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11783 || (TREE_CODE (arg1) == VECTOR_CST
11784 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11785 {
11786 if (flag_reciprocal_math
11787 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11788 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11789 /* Find the reciprocal if optimizing and the result is exact.
11790 TODO: Complex reciprocal not implemented. */
11791 if (TREE_CODE (arg1) != COMPLEX_CST)
11792 {
11793 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11794
11795 if (inverse)
11796 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11797 }
11798 }
11799 /* Convert A/B/C to A/(B*C). */
11800 if (flag_reciprocal_math
11801 && TREE_CODE (arg0) == RDIV_EXPR)
11802 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11803 fold_build2_loc (loc, MULT_EXPR, type,
11804 TREE_OPERAND (arg0, 1), arg1));
11805
11806 /* Convert A/(B/C) to (A/B)*C. */
11807 if (flag_reciprocal_math
11808 && TREE_CODE (arg1) == RDIV_EXPR)
11809 return fold_build2_loc (loc, MULT_EXPR, type,
11810 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11811 TREE_OPERAND (arg1, 0)),
11812 TREE_OPERAND (arg1, 1));
11813
11814 /* Convert C1/(X*C2) into (C1/C2)/X. */
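      /* E.g. 10.0 / (x * 5.0) becomes 2.0 / x.  */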
11815 if (flag_reciprocal_math
11816 && TREE_CODE (arg1) == MULT_EXPR
11817 && TREE_CODE (arg0) == REAL_CST
11818 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11819 {
11820 tree tem = const_binop (RDIV_EXPR, arg0,
11821 TREE_OPERAND (arg1, 1));
11822 if (tem)
11823 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11824 TREE_OPERAND (arg1, 0));
11825 }
11826
11827 if (flag_unsafe_math_optimizations)
11828 {
11829 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11830 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11831
11832 /* Optimize sin(x)/cos(x) as tan(x). */
11833 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11834 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11835 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11836 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11837 CALL_EXPR_ARG (arg1, 0), 0))
11838 {
11839 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11840
11841 if (tanfn != NULL_TREE)
11842 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11843 }
11844
11845 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11846 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11847 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11848 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11849 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11850 CALL_EXPR_ARG (arg1, 0), 0))
11851 {
11852 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11853
11854 if (tanfn != NULL_TREE)
11855 {
11856 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11857 CALL_EXPR_ARG (arg0, 0));
11858 return fold_build2_loc (loc, RDIV_EXPR, type,
11859 build_real (type, dconst1), tmp);
11860 }
11861 }
11862
11863 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11864 NaNs or Infinities. */
11865 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11866 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11867 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11868 {
11869 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11870 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11871
11872 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11873 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11874 && operand_equal_p (arg00, arg01, 0))
11875 {
11876 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11877
11878 if (cosfn != NULL_TREE)
11879 return build_call_expr_loc (loc, cosfn, 1, arg00);
11880 }
11881 }
11882
11883 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11884 NaNs or Infinities. */
11885 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11886 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11887 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11888 {
11889 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11890 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11891
11892 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11893 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11894 && operand_equal_p (arg00, arg01, 0))
11895 {
11896 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11897
11898 if (cosfn != NULL_TREE)
11899 {
11900 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11901 return fold_build2_loc (loc, RDIV_EXPR, type,
11902 build_real (type, dconst1),
11903 tmp);
11904 }
11905 }
11906 }
11907
11908 /* Optimize pow(x,c)/x as pow(x,c-1). */
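	  /* E.g. pow (x, 3.0) / x becomes pow (x, 2.0).  */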
11909 if (fcode0 == BUILT_IN_POW
11910 || fcode0 == BUILT_IN_POWF
11911 || fcode0 == BUILT_IN_POWL)
11912 {
11913 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11914 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11915 if (TREE_CODE (arg01) == REAL_CST
11916 && !TREE_OVERFLOW (arg01)
11917 && operand_equal_p (arg1, arg00, 0))
11918 {
11919 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11920 REAL_VALUE_TYPE c;
11921 tree arg;
11922
11923 c = TREE_REAL_CST (arg01);
11924 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11925 arg = build_real (type, c);
11926 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11927 }
11928 }
11929
11930 /* Optimize a/root(b/c) into a*root(c/b). */
11931 if (BUILTIN_ROOT_P (fcode1))
11932 {
11933 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11934
11935 if (TREE_CODE (rootarg) == RDIV_EXPR)
11936 {
11937 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11938 tree b = TREE_OPERAND (rootarg, 0);
11939 tree c = TREE_OPERAND (rootarg, 1);
11940
11941 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11942
11943 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11944 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11945 }
11946 }
11947
11948 /* Optimize x/expN(y) into x*expN(-y). */
11949 if (BUILTIN_EXPONENT_P (fcode1))
11950 {
11951 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11952 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11953 arg1 = build_call_expr_loc (loc,
11954 expfn, 1,
11955 fold_convert_loc (loc, type, arg));
11956 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11957 }
11958
11959 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11960 if (fcode1 == BUILT_IN_POW
11961 || fcode1 == BUILT_IN_POWF
11962 || fcode1 == BUILT_IN_POWL)
11963 {
11964 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11965 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11966 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11967 tree neg11 = fold_convert_loc (loc, type,
11968 negate_expr (arg11));
11969 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11970 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11971 }
11972 }
11973 return NULL_TREE;
11974
11975 case TRUNC_DIV_EXPR:
11976 /* Optimize (X & (-A)) / A where A is a power of 2,
11977 to X >> log2(A). */
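      /* E.g. for signed x, (x & -16) / 16 becomes x >> 4; the mask
	 guarantees the division is exact, so the arithmetic shift is
	 safe even for negative x.  */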
11978 if (TREE_CODE (arg0) == BIT_AND_EXPR
11979 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11980 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11981 {
11982 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11983 arg1, TREE_OPERAND (arg0, 1));
11984 if (sum && integer_zerop (sum))
	  {
11985 unsigned long pow2;
11986
11987 if (TREE_INT_CST_LOW (arg1))
11988 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11989 else
11990 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11991 + HOST_BITS_PER_WIDE_INT;
11992
11993 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11994 TREE_OPERAND (arg0, 0),
11995 build_int_cst (integer_type_node, pow2));
11996 }
11997 }
11998
11999 /* Fall thru */
12000
12001 case FLOOR_DIV_EXPR:
12002 /* Simplify A / (B << N) where A and B are positive and B is
12003 a power of 2, to A >> (N + log2(B)). */
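      /* E.g. for unsigned x, x / (2 << n) becomes x >> (n + 1).  */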
12004 strict_overflow_p = false;
12005 if (TREE_CODE (arg1) == LSHIFT_EXPR
12006 && (TYPE_UNSIGNED (type)
12007 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12008 {
12009 tree sval = TREE_OPERAND (arg1, 0);
12010 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12011 {
12012 tree sh_cnt = TREE_OPERAND (arg1, 1);
12013 unsigned long pow2;
12014
12015 if (TREE_INT_CST_LOW (sval))
12016 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12017 else
12018 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12019 + HOST_BITS_PER_WIDE_INT;
12020
12021 if (strict_overflow_p)
12022 fold_overflow_warning (("assuming signed overflow does not "
12023 "occur when simplifying A / (B << N)"),
12024 WARN_STRICT_OVERFLOW_MISC);
12025
12026 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12027 sh_cnt,
12028 build_int_cst (TREE_TYPE (sh_cnt),
12029 pow2));
12030 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12031 fold_convert_loc (loc, type, arg0), sh_cnt);
12032 }
12033 }
12034
12035 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12036 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12037 if (INTEGRAL_TYPE_P (type)
12038 && TYPE_UNSIGNED (type)
12039 && code == FLOOR_DIV_EXPR)
12040 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12041
12042 /* Fall thru */
12043
12044 case ROUND_DIV_EXPR:
12045 case CEIL_DIV_EXPR:
12046 case EXACT_DIV_EXPR:
12047 if (integer_onep (arg1))
12048 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12049 if (integer_zerop (arg1))
12050 return NULL_TREE;
12051 /* X / -1 is -X. */
12052 if (!TYPE_UNSIGNED (type)
12053 && TREE_CODE (arg1) == INTEGER_CST
12054 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12055 && TREE_INT_CST_HIGH (arg1) == -1)
12056 return fold_convert_loc (loc, type, negate_expr (arg0));
12057
12058 /* Convert -A / -B to A / B when the type is signed and overflow is
12059 undefined. */
12060 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12061 && TREE_CODE (arg0) == NEGATE_EXPR
12062 && negate_expr_p (arg1))
12063 {
12064 if (INTEGRAL_TYPE_P (type))
12065 fold_overflow_warning (("assuming signed overflow does not occur "
12066 "when distributing negation across "
12067 "division"),
12068 WARN_STRICT_OVERFLOW_MISC);
12069 return fold_build2_loc (loc, code, type,
12070 fold_convert_loc (loc, type,
12071 TREE_OPERAND (arg0, 0)),
12072 fold_convert_loc (loc, type,
12073 negate_expr (arg1)));
12074 }
12075 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12076 && TREE_CODE (arg1) == NEGATE_EXPR
12077 && negate_expr_p (arg0))
12078 {
12079 if (INTEGRAL_TYPE_P (type))
12080 fold_overflow_warning (("assuming signed overflow does not occur "
12081 "when distributing negation across "
12082 "division"),
12083 WARN_STRICT_OVERFLOW_MISC);
12084 return fold_build2_loc (loc, code, type,
12085 fold_convert_loc (loc, type,
12086 negate_expr (arg0)),
12087 fold_convert_loc (loc, type,
12088 TREE_OPERAND (arg1, 0)));
12089 }
12090
12091 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12092 operation, EXACT_DIV_EXPR.
12093
12094 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12095 At one time others generated faster code, but it's not clear whether
12096 they still do after the last round of changes to the DIV code in expmed.c. */
12097 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12098 && multiple_of_p (type, arg0, arg1))
12099 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12100
12101 strict_overflow_p = false;
12102 if (TREE_CODE (arg1) == INTEGER_CST
12103 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12104 &strict_overflow_p)))
12105 {
12106 if (strict_overflow_p)
12107 fold_overflow_warning (("assuming signed overflow does not occur "
12108 "when simplifying division"),
12109 WARN_STRICT_OVERFLOW_MISC);
12110 return fold_convert_loc (loc, type, tem);
12111 }
12112
12113 return NULL_TREE;
12114
12115 case CEIL_MOD_EXPR:
12116 case FLOOR_MOD_EXPR:
12117 case ROUND_MOD_EXPR:
12118 case TRUNC_MOD_EXPR:
12119 /* X % 1 is always zero, but be sure to preserve any side
12120 effects in X. */
12121 if (integer_onep (arg1))
12122 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12123
12124 /* For X % 0, return X % 0 unchanged so that we can get the
12125 proper warnings and errors. */
12126 if (integer_zerop (arg1))
12127 return NULL_TREE;
12128
12129 /* 0 % X is always zero, but be sure to preserve any side
12130 effects in X. Place this after checking for X == 0. */
12131 if (integer_zerop (arg0))
12132 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12133
12134 /* X % -1 is zero. */
12135 if (!TYPE_UNSIGNED (type)
12136 && TREE_CODE (arg1) == INTEGER_CST
12137 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12138 && TREE_INT_CST_HIGH (arg1) == -1)
12139 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12140
12141 /* X % -C is the same as X % C. */
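      /* E.g. x % -16 becomes x % 16: with truncating division the
	 result depends only on the magnitude of the divisor.  */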
12142 if (code == TRUNC_MOD_EXPR
12143 && !TYPE_UNSIGNED (type)
12144 && TREE_CODE (arg1) == INTEGER_CST
12145 && !TREE_OVERFLOW (arg1)
12146 && TREE_INT_CST_HIGH (arg1) < 0
12147 && !TYPE_OVERFLOW_TRAPS (type)
12148 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12149 && !sign_bit_p (arg1, arg1))
12150 return fold_build2_loc (loc, code, type,
12151 fold_convert_loc (loc, type, arg0),
12152 fold_convert_loc (loc, type,
12153 negate_expr (arg1)));
12154
12155 /* X % -Y is the same as X % Y. */
12156 if (code == TRUNC_MOD_EXPR
12157 && !TYPE_UNSIGNED (type)
12158 && TREE_CODE (arg1) == NEGATE_EXPR
12159 && !TYPE_OVERFLOW_TRAPS (type))
12160 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12161 fold_convert_loc (loc, type,
12162 TREE_OPERAND (arg1, 0)));
12163
12164 strict_overflow_p = false;
12165 if (TREE_CODE (arg1) == INTEGER_CST
12166 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12167 &strict_overflow_p)))
12168 {
12169 if (strict_overflow_p)
12170 fold_overflow_warning (("assuming signed overflow does not occur "
12171 "when simplifying modulus"),
12172 WARN_STRICT_OVERFLOW_MISC);
12173 return fold_convert_loc (loc, type, tem);
12174 }
12175
12176 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12177 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
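      /* E.g. unsigned x % 8 becomes x & 7, and x % (4 << n) becomes
	 x & ((4 << n) - 1).  */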
12178 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12179 && (TYPE_UNSIGNED (type)
12180 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12181 {
12182 tree c = arg1;
12183 /* Also optimize A % (C << N) where C is a power of 2,
12184 to A & ((C << N) - 1). */
12185 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12186 c = TREE_OPERAND (arg1, 0);
12187
12188 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12189 {
12190 tree mask
12191 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12192 build_int_cst (TREE_TYPE (arg1), 1));
12193 if (strict_overflow_p)
12194 fold_overflow_warning (("assuming signed overflow does not "
12195 "occur when simplifying "
12196 "X % (power of two)"),
12197 WARN_STRICT_OVERFLOW_MISC);
12198 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12199 fold_convert_loc (loc, type, arg0),
12200 fold_convert_loc (loc, type, mask));
12201 }
12202 }
12203
12204 return NULL_TREE;
12205
12206 case LROTATE_EXPR:
12207 case RROTATE_EXPR:
12208 if (integer_all_onesp (arg0))
12209 return omit_one_operand_loc (loc, type, arg0, arg1);
12210 goto shift;
12211
12212 case RSHIFT_EXPR:
12213 /* Optimize -1 >> x for arithmetic right shifts. */
12214 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12215 && tree_expr_nonnegative_p (arg1))
12216 return omit_one_operand_loc (loc, type, arg0, arg1);
12217 /* ... fall through ... */
12218
12219 case LSHIFT_EXPR:
12220 shift:
12221 if (integer_zerop (arg1))
12222 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12223 if (integer_zerop (arg0))
12224 return omit_one_operand_loc (loc, type, arg0, arg1);
12225
12226 /* Since a negative shift count is not well-defined,
12227 don't try to compute it in the compiler. */
12228 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12229 return NULL_TREE;
12230
12231 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
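      /* E.g. (x << 3) << 5 becomes x << 8, as long as the combined
	 count stays below the precision of the type.  */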
12232 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12233 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12234 && host_integerp (TREE_OPERAND (arg0, 1), false)
12235 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12236 {
12237 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12238 + TREE_INT_CST_LOW (arg1));
12239
12240 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12241 being well defined. */
12242 if (low >= TYPE_PRECISION (type))
12243 {
12244 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12245 low = low % TYPE_PRECISION (type);
12246 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12247 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12248 TREE_OPERAND (arg0, 0));
12249 else
12250 low = TYPE_PRECISION (type) - 1;
12251 }
12252
12253 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12254 build_int_cst (type, low));
12255 }
12256
12257 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12258 into x & ((unsigned)-1 >> c) for unsigned types. */
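      /* E.g. (x >> 4) << 4 becomes x & -16, and for a 32-bit unsigned
	 x, (x << 4) >> 4 becomes x & 0x0fffffff.  */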
12259 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12260 || (TYPE_UNSIGNED (type)
12261 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12262 && host_integerp (arg1, false)
12263 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12264 && host_integerp (TREE_OPERAND (arg0, 1), false)
12265 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12266 {
12267 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12268 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12269 tree lshift;
12270 tree arg00;
12271
12272 if (low0 == low1)
12273 {
12274 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12275
12276 lshift = build_int_cst (type, -1);
12277 lshift = int_const_binop (code, lshift, arg1);
12278
12279 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12280 }
12281 }
12282
12283 /* Rewrite an LROTATE_EXPR by a constant into an
12284 RROTATE_EXPR by a new constant. */
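      /* E.g. a 32-bit rotate left by 8 becomes a rotate right by 24.  */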
12285 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12286 {
12287 tree tem = build_int_cst (TREE_TYPE (arg1),
12288 TYPE_PRECISION (type));
12289 tem = const_binop (MINUS_EXPR, tem, arg1);
12290 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12291 }
12292
12293 /* If we have a rotate of a bit operation with the rotate count and
12294 the second operand of the bit operation both constant,
12295 permute the two operations. */
12296 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12297 && (TREE_CODE (arg0) == BIT_AND_EXPR
12298 || TREE_CODE (arg0) == BIT_IOR_EXPR
12299 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12300 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12301 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12302 fold_build2_loc (loc, code, type,
12303 TREE_OPERAND (arg0, 0), arg1),
12304 fold_build2_loc (loc, code, type,
12305 TREE_OPERAND (arg0, 1), arg1));
12306
12307 /* Two consecutive rotates adding up to the precision of the
12308 type can be ignored. */
12309 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12310 && TREE_CODE (arg0) == RROTATE_EXPR
12311 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12312 && TREE_INT_CST_HIGH (arg1) == 0
12313 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12314 && ((TREE_INT_CST_LOW (arg1)
12315 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12316 == (unsigned int) TYPE_PRECISION (type)))
12317 return TREE_OPERAND (arg0, 0);
12318
12319 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12320 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12321 if the latter can be further optimized. */
12322 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12323 && TREE_CODE (arg0) == BIT_AND_EXPR
12324 && TREE_CODE (arg1) == INTEGER_CST
12325 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12326 {
12327 tree mask = fold_build2_loc (loc, code, type,
12328 fold_convert_loc (loc, type,
12329 TREE_OPERAND (arg0, 1)),
12330 arg1);
12331 tree shift = fold_build2_loc (loc, code, type,
12332 fold_convert_loc (loc, type,
12333 TREE_OPERAND (arg0, 0)),
12334 arg1);
12335 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12336 if (tem)
12337 return tem;
12338 }
12339
12340 return NULL_TREE;
12341
12342 case MIN_EXPR:
12343 if (operand_equal_p (arg0, arg1, 0))
12344 return omit_one_operand_loc (loc, type, arg0, arg1);
12345 if (INTEGRAL_TYPE_P (type)
12346 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12347 return omit_one_operand_loc (loc, type, arg1, arg0);
12348 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12349 if (tem)
12350 return tem;
12351 goto associate;
12352
12353 case MAX_EXPR:
12354 if (operand_equal_p (arg0, arg1, 0))
12355 return omit_one_operand_loc (loc, type, arg0, arg1);
12356 if (INTEGRAL_TYPE_P (type)
12357 && TYPE_MAX_VALUE (type)
12358 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12359 return omit_one_operand_loc (loc, type, arg1, arg0);
12360 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12361 if (tem)
12362 return tem;
12363 goto associate;
12364
12365 case TRUTH_ANDIF_EXPR:
12366 /* Note that the operands of this must be ints
12367 and their values must be 0 or 1.
12368 ("true" is a fixed value perhaps depending on the language.) */
12369 /* If first arg is constant zero, return it. */
12370 if (integer_zerop (arg0))
12371 return fold_convert_loc (loc, type, arg0);
12372 case TRUTH_AND_EXPR:
12373 /* If either arg is constant true, drop it. */
12374 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12375 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12376 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12377 /* Preserve sequence points. */
12378 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12379 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12380 /* If second arg is constant zero, result is zero, but first arg
12381 must be evaluated. */
12382 if (integer_zerop (arg1))
12383 return omit_one_operand_loc (loc, type, arg1, arg0);
12384 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12385 case will be handled here. */
12386 if (integer_zerop (arg0))
12387 return omit_one_operand_loc (loc, type, arg0, arg1);
12388
12389 /* !X && X is always false. */
12390 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12391 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12392 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12393 /* X && !X is always false. */
12394 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12395 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12396 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12397
12398 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12399 means A >= Y && A != MAX, but in this case we know that
12400 A < X <= MAX. */
12401
12402 if (!TREE_SIDE_EFFECTS (arg0)
12403 && !TREE_SIDE_EFFECTS (arg1))
12404 {
12405 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12406 if (tem && !operand_equal_p (tem, arg0, 0))
12407 return fold_build2_loc (loc, code, type, tem, arg1);
12408
12409 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12410 if (tem && !operand_equal_p (tem, arg1, 0))
12411 return fold_build2_loc (loc, code, type, arg0, tem);
12412 }
12413
12414 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12415 != NULL_TREE)
12416 return tem;
12417
12418 return NULL_TREE;
12419
12420 case TRUTH_ORIF_EXPR:
12421 /* Note that the operands of this must be ints
12422 and their values must be 0 or 1.
12423 ("true" is a fixed value perhaps depending on the language.) */
12424 /* If first arg is constant true, return it. */
12425 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12426 return fold_convert_loc (loc, type, arg0);
12427 case TRUTH_OR_EXPR:
12428 /* If either arg is constant zero, drop it. */
12429 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12430 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12431 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12432 /* Preserve sequence points. */
12433 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12434 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12435 /* If second arg is constant true, result is true, but we must
12436 evaluate first arg. */
12437 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12438 return omit_one_operand_loc (loc, type, arg1, arg0);
12439 /* Likewise for first arg, but note this only occurs here for
12440 TRUTH_OR_EXPR. */
12441 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12442 return omit_one_operand_loc (loc, type, arg0, arg1);
12443
12444 /* !X || X is always true. */
12445 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12446 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12447 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12448 /* X || !X is always true. */
12449 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12450 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12451 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12452
12453 /* (X && !Y) || (!X && Y) is X ^ Y. */
12454 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12455 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12456 {
12457 tree a0, a1, l0, l1, n0, n1;
12458
12459 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12460 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12461
12462 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12463 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12464
12465 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12466 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12467
12468 if ((operand_equal_p (n0, a0, 0)
12469 && operand_equal_p (n1, a1, 0))
12470 || (operand_equal_p (n0, a1, 0)
12471 && operand_equal_p (n1, a0, 0)))
12472 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12473 }
12474
12475 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12476 != NULL_TREE)
12477 return tem;
12478
12479 return NULL_TREE;
12480
12481 case TRUTH_XOR_EXPR:
12482 /* If the second arg is constant zero, drop it. */
12483 if (integer_zerop (arg1))
12484 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12485 /* If the second arg is constant true, this is a logical inversion. */
12486 if (integer_onep (arg1))
12487 {
12488 /* Only call invert_truthvalue if operand is a truth value. */
12489 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12490 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12491 else
12492 tem = invert_truthvalue_loc (loc, arg0);
12493 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12494 }
12495 /* Identical arguments cancel to zero. */
12496 if (operand_equal_p (arg0, arg1, 0))
12497 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12498
12499 /* !X ^ X is always true. */
12500 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12501 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12502 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12503
12504 /* X ^ !X is always true. */
12505 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12506 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12507 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12508
12509 return NULL_TREE;
12510
12511 case EQ_EXPR:
12512 case NE_EXPR:
12513 STRIP_NOPS (arg0);
12514 STRIP_NOPS (arg1);
12515
12516 tem = fold_comparison (loc, code, type, op0, op1);
12517 if (tem != NULL_TREE)
12518 return tem;
12519
12520 /* bool_var != 0 becomes bool_var. */
12521 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12522 && code == NE_EXPR)
12523 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12524
12525 /* bool_var == 1 becomes bool_var. */
12526 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12527 && code == EQ_EXPR)
12528 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12529
12530 /* bool_var != 1 becomes !bool_var. */
12531 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12532 && code == NE_EXPR)
12533 return fold_convert_loc (loc, type,
12534 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12535 TREE_TYPE (arg0), arg0));
12536
12537 /* bool_var == 0 becomes !bool_var. */
12538 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12539 && code == EQ_EXPR)
12540 return fold_convert_loc (loc, type,
12541 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12542 TREE_TYPE (arg0), arg0));
12543
12544 /* !exp != 0 becomes !exp. */
12545 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12546 && code == NE_EXPR)
12547 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12548
12549 /* If this is an equality comparison of the addresses of two non-weak,
12550 unaliased symbols, neither of which is extern (since we do not
12551 have access to attributes for externs), then we know the result. */
12552 if (TREE_CODE (arg0) == ADDR_EXPR
12553 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12554 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12555 && ! lookup_attribute ("alias",
12556 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12557 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12558 && TREE_CODE (arg1) == ADDR_EXPR
12559 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12560 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12561 && ! lookup_attribute ("alias",
12562 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12563 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12564 {
12565 /* We know that we're looking at the address of two
12566 non-weak, unaliased, static _DECL nodes.
12567
12568 It is both wasteful and incorrect to call operand_equal_p
12569 to compare the two ADDR_EXPR nodes. It is wasteful in that
12570 all we need to do is test pointer equality for the arguments
12571 to the two ADDR_EXPR nodes. It is incorrect to use
12572 operand_equal_p as that function is NOT equivalent to a
12573 C equality test. It can in fact return false for two
12574 objects which would test as equal using the C equality
12575 operator. */
12576 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12577 return constant_boolean_node (equal
12578 ? code == EQ_EXPR : code != EQ_EXPR,
12579 type);
12580 }
12581
12582 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12583 a MINUS_EXPR of a constant, we can convert it into a comparison with
12584 a revised constant as long as no overflow occurs. */
12585 if (TREE_CODE (arg1) == INTEGER_CST
12586 && (TREE_CODE (arg0) == PLUS_EXPR
12587 || TREE_CODE (arg0) == MINUS_EXPR)
12588 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12589 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12590 ? MINUS_EXPR : PLUS_EXPR,
12591 fold_convert_loc (loc, TREE_TYPE (arg0),
12592 arg1),
12593 TREE_OPERAND (arg0, 1)))
12594 && !TREE_OVERFLOW (tem))
12595 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12596
12597 /* Similarly for a NEGATE_EXPR. */
12598 if (TREE_CODE (arg0) == NEGATE_EXPR
12599 && TREE_CODE (arg1) == INTEGER_CST
12600 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12601 arg1)))
12602 && TREE_CODE (tem) == INTEGER_CST
12603 && !TREE_OVERFLOW (tem))
12604 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12605
12606 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
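      /* E.g. (x ^ 3) == 5 becomes x == 6.  */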
12607 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12608 && TREE_CODE (arg1) == INTEGER_CST
12609 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12610 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12611 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12612 fold_convert_loc (loc,
12613 TREE_TYPE (arg0),
12614 arg1),
12615 TREE_OPERAND (arg0, 1)));
12616
12617 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12618 if ((TREE_CODE (arg0) == PLUS_EXPR
12619 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12620 || TREE_CODE (arg0) == MINUS_EXPR)
12621 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12622 0)),
12623 arg1, 0)
12624 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12625 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12626 {
12627 tree val = TREE_OPERAND (arg0, 1);
12628 return omit_two_operands_loc (loc, type,
12629 fold_build2_loc (loc, code, type,
12630 val,
12631 build_int_cst (TREE_TYPE (val),
12632 0)),
12633 TREE_OPERAND (arg0, 0), arg1);
12634 }
12635
12636 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12637 if (TREE_CODE (arg0) == MINUS_EXPR
12638 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12639 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12640 1)),
12641 arg1, 0)
12642 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12643 {
12644 return omit_two_operands_loc (loc, type,
12645 code == NE_EXPR
12646 ? boolean_true_node : boolean_false_node,
12647 TREE_OPERAND (arg0, 1), arg1);
12648 }
12649
12650 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12651 for !=. Don't do this for ordered comparisons due to overflow. */
12652 if (TREE_CODE (arg0) == MINUS_EXPR
12653 && integer_zerop (arg1))
12654 return fold_build2_loc (loc, code, type,
12655 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12656
12657 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12658 if (TREE_CODE (arg0) == ABS_EXPR
12659 && (integer_zerop (arg1) || real_zerop (arg1)))
12660 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12661
12662 /* If this is an EQ or NE comparison with zero and ARG0 is
12663 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12664 two operations, but the latter can be done in one less insn
12665 on machines that have only two-operand insns or on which a
12666 constant cannot be the first operand. */
12667 if (TREE_CODE (arg0) == BIT_AND_EXPR
12668 && integer_zerop (arg1))
12669 {
12670 tree arg00 = TREE_OPERAND (arg0, 0);
12671 tree arg01 = TREE_OPERAND (arg0, 1);
12672 if (TREE_CODE (arg00) == LSHIFT_EXPR
12673 && integer_onep (TREE_OPERAND (arg00, 0)))
12674 {
12675 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12676 arg01, TREE_OPERAND (arg00, 1));
12677 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12678 build_int_cst (TREE_TYPE (arg0), 1));
12679 return fold_build2_loc (loc, code, type,
12680 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12681 arg1);
12682 }
12683 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12684 && integer_onep (TREE_OPERAND (arg01, 0)))
12685 {
12686 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12687 arg00, TREE_OPERAND (arg01, 1));
12688 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12689 build_int_cst (TREE_TYPE (arg0), 1));
12690 return fold_build2_loc (loc, code, type,
12691 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12692 arg1);
12693 }
12694 }
12695
12696 /* If this is an NE or EQ comparison of zero against the result of a
12697 signed MOD operation whose second operand is a power of 2, make
12698 the MOD operation unsigned since it is simpler and equivalent. */
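      /* E.g. for signed x, x % 4 == 0 becomes (unsigned) x % 4 == 0,
	 which the power-of-two fold above then reduces to
	 ((unsigned) x & 3) == 0.  */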
12699 if (integer_zerop (arg1)
12700 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12701 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12702 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12703 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12704 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12705 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12706 {
12707 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12708 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12709 fold_convert_loc (loc, newtype,
12710 TREE_OPERAND (arg0, 0)),
12711 fold_convert_loc (loc, newtype,
12712 TREE_OPERAND (arg0, 1)));
12713
12714 return fold_build2_loc (loc, code, type, newmod,
12715 fold_convert_loc (loc, newtype, arg1));
12716 }
12717
12718 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12719 C1 is a valid shift constant, and C2 is a power of two, i.e.
12720 a single bit. */
12721 if (TREE_CODE (arg0) == BIT_AND_EXPR
12722 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12723 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12724 == INTEGER_CST
12725 && integer_pow2p (TREE_OPERAND (arg0, 1))
12726 && integer_zerop (arg1))
12727 {
12728 tree itype = TREE_TYPE (arg0);
12729 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12730 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12731
12732 /* Check for a valid shift count. */
12733 if (TREE_INT_CST_HIGH (arg001) == 0
12734 && TREE_INT_CST_LOW (arg001) < prec)
12735 {
12736 tree arg01 = TREE_OPERAND (arg0, 1);
12737 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12738 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12739 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12740 can be rewritten as (X & (C2 << C1)) != 0. */
12741 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12742 {
12743 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12744 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12745 return fold_build2_loc (loc, code, type, tem,
12746 fold_convert_loc (loc, itype, arg1));
12747 }
12748 /* Otherwise, for signed (arithmetic) shifts,
12749 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12750 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12751 else if (!TYPE_UNSIGNED (itype))
12752 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12753 arg000, build_int_cst (itype, 0));
12754 /* Otherwise, for unsigned (logical) shifts,
12755 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12756 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12757 else
12758 return omit_one_operand_loc (loc, type,
12759 code == EQ_EXPR ? integer_one_node
12760 : integer_zero_node,
12761 arg000);
12762 }
12763 }
12764
12765 /* If we have (A & C) == C where C is a power of 2, convert this into
12766 (A & C) != 0. Similarly for NE_EXPR. */
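      /* E.g. (x & 8) == 8 becomes (x & 8) != 0.  */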
12767 if (TREE_CODE (arg0) == BIT_AND_EXPR
12768 && integer_pow2p (TREE_OPERAND (arg0, 1))
12769 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12770 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12771 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12772 integer_zero_node));
12773
12774 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12775 bit, then fold the expression into A < 0 or A >= 0. */
12776 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12777 if (tem)
12778 return tem;
12779
12780 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12781 Similarly for NE_EXPR. */
12782 if (TREE_CODE (arg0) == BIT_AND_EXPR
12783 && TREE_CODE (arg1) == INTEGER_CST
12784 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12785 {
12786 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12787 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12788 TREE_OPERAND (arg0, 1));
12789 tree dandnotc
12790 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12791 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12792 notc);
12793 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12794 if (integer_nonzerop (dandnotc))
12795 return omit_one_operand_loc (loc, type, rslt, arg0);
12796 }
12797
12798 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12799 Similarly for NE_EXPR. */
12800 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12801 && TREE_CODE (arg1) == INTEGER_CST
12802 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12803 {
12804 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12805 tree candnotd
12806 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12807 TREE_OPERAND (arg0, 1),
12808 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12809 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12810 if (integer_nonzerop (candnotd))
12811 return omit_one_operand_loc (loc, type, rslt, arg0);
12812 }
12813
12814 /* If this is a comparison of a field, we may be able to simplify it. */
12815 if ((TREE_CODE (arg0) == COMPONENT_REF
12816 || TREE_CODE (arg0) == BIT_FIELD_REF)
12817 /* Handle the constant case even without -O
12818 to make sure the warnings are given. */
12819 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12820 {
12821 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12822 if (t1)
12823 return t1;
12824 }
12825
12826 /* Optimize comparisons of strlen vs zero to a compare of the
12827 first character of the string vs zero. To wit,
12828 strlen(ptr) == 0 => *ptr == 0
12829 strlen(ptr) != 0 => *ptr != 0
12830 Other cases should reduce to one of these two (or a constant)
12831 due to the return value of strlen being unsigned. */
12832 if (TREE_CODE (arg0) == CALL_EXPR
12833 && integer_zerop (arg1))
12834 {
12835 tree fndecl = get_callee_fndecl (arg0);
12836
12837 if (fndecl
12838 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12839 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12840 && call_expr_nargs (arg0) == 1
12841 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12842 {
12843 tree iref = build_fold_indirect_ref_loc (loc,
12844 CALL_EXPR_ARG (arg0, 0));
12845 return fold_build2_loc (loc, code, type, iref,
12846 build_int_cst (TREE_TYPE (iref), 0));
12847 }
12848 }
12849
12850 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12851 of X. Similarly fold (X >> C) == 0 into X >= 0. */
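      /* E.g. with a 32-bit int, (x >> 31) != 0 becomes x < 0 and
	 (x >> 31) == 0 becomes x >= 0.  */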
12852 if (TREE_CODE (arg0) == RSHIFT_EXPR
12853 && integer_zerop (arg1)
12854 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12855 {
12856 tree arg00 = TREE_OPERAND (arg0, 0);
12857 tree arg01 = TREE_OPERAND (arg0, 1);
12858 tree itype = TREE_TYPE (arg00);
12859 if (TREE_INT_CST_HIGH (arg01) == 0
12860 && TREE_INT_CST_LOW (arg01)
12861 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12862 {
12863 if (TYPE_UNSIGNED (itype))
12864 {
12865 itype = signed_type_for (itype);
12866 arg00 = fold_convert_loc (loc, itype, arg00);
12867 }
12868 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12869 type, arg00, build_int_cst (itype, 0));
12870 }
12871 }
12872
12873 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12874 if (integer_zerop (arg1)
12875 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12876 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12877 TREE_OPERAND (arg0, 1));
12878
12879 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12880 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12881 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12882 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12883 build_int_cst (TREE_TYPE (arg0), 0));
12884 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12885 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12886 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12887 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12888 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12889 build_int_cst (TREE_TYPE (arg0), 0));
12890
12891 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12892 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12893 && TREE_CODE (arg1) == INTEGER_CST
12894 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12895 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12896 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12897 TREE_OPERAND (arg0, 1), arg1));
12898
12899 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12900 (X & C) == 0 when C is a single bit. */
12901 if (TREE_CODE (arg0) == BIT_AND_EXPR
12902 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12903 && integer_zerop (arg1)
12904 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12905 {
12906 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12907 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12908 TREE_OPERAND (arg0, 1));
12909 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12910 type, tem,
12911 fold_convert_loc (loc, TREE_TYPE (arg0),
12912 arg1));
12913 }
12914
12915 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12916 constant C is a power of two, i.e. a single bit. */
12917 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12918 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12919 && integer_zerop (arg1)
12920 && integer_pow2p (TREE_OPERAND (arg0, 1))
12921 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12922 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12923 {
12924 tree arg00 = TREE_OPERAND (arg0, 0);
12925 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12926 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12927 }
12928
12929 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12930 when C is a power of two, i.e. a single bit. */
12931 if (TREE_CODE (arg0) == BIT_AND_EXPR
12932 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12933 && integer_zerop (arg1)
12934 && integer_pow2p (TREE_OPERAND (arg0, 1))
12935 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12936 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12937 {
12938 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12939 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12940 arg000, TREE_OPERAND (arg0, 1));
12941 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12942 tem, build_int_cst (TREE_TYPE (tem), 0));
12943 }
12944
12945 if (integer_zerop (arg1)
12946 && tree_expr_nonzero_p (arg0))
12947 {
12948 tree res = constant_boolean_node (code == NE_EXPR, type);
12949 return omit_one_operand_loc (loc, type, res, arg0);
12950 }
12951
12952 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12953 if (TREE_CODE (arg0) == NEGATE_EXPR
12954 && TREE_CODE (arg1) == NEGATE_EXPR)
12955 return fold_build2_loc (loc, code, type,
12956 TREE_OPERAND (arg0, 0),
12957 fold_convert_loc (loc, TREE_TYPE (arg0),
12958 TREE_OPERAND (arg1, 0)));
12959
12960 /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries. */
12961 if (TREE_CODE (arg0) == BIT_AND_EXPR
12962 && TREE_CODE (arg1) == BIT_AND_EXPR)
12963 {
12964 tree arg00 = TREE_OPERAND (arg0, 0);
12965 tree arg01 = TREE_OPERAND (arg0, 1);
12966 tree arg10 = TREE_OPERAND (arg1, 0);
12967 tree arg11 = TREE_OPERAND (arg1, 1);
12968 tree itype = TREE_TYPE (arg0);
12969
12970 if (operand_equal_p (arg01, arg11, 0))
12971 return fold_build2_loc (loc, code, type,
12972 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12973 fold_build2_loc (loc,
12974 BIT_XOR_EXPR, itype,
12975 arg00, arg10),
12976 arg01),
12977 build_int_cst (itype, 0));
12978
12979 if (operand_equal_p (arg01, arg10, 0))
12980 return fold_build2_loc (loc, code, type,
12981 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12982 fold_build2_loc (loc,
12983 BIT_XOR_EXPR, itype,
12984 arg00, arg11),
12985 arg01),
12986 build_int_cst (itype, 0));
12987
12988 if (operand_equal_p (arg00, arg11, 0))
12989 return fold_build2_loc (loc, code, type,
12990 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12991 fold_build2_loc (loc,
12992 BIT_XOR_EXPR, itype,
12993 arg01, arg10),
12994 arg00),
12995 build_int_cst (itype, 0));
12996
12997 if (operand_equal_p (arg00, arg10, 0))
12998 return fold_build2_loc (loc, code, type,
12999 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13000 fold_build2_loc (loc,
13001 BIT_XOR_EXPR, itype,
13002 arg01, arg11),
13003 arg00),
13004 build_int_cst (itype, 0));
13005 }
13006
13007 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13008 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13009 {
13010 tree arg00 = TREE_OPERAND (arg0, 0);
13011 tree arg01 = TREE_OPERAND (arg0, 1);
13012 tree arg10 = TREE_OPERAND (arg1, 0);
13013 tree arg11 = TREE_OPERAND (arg1, 1);
13014 tree itype = TREE_TYPE (arg0);
13015
13016 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13017 operand_equal_p guarantees no side-effects so we don't need
13018 to use omit_one_operand on Z. */
13019 if (operand_equal_p (arg01, arg11, 0))
13020 return fold_build2_loc (loc, code, type, arg00,
13021 fold_convert_loc (loc, TREE_TYPE (arg00),
13022 arg10));
13023 if (operand_equal_p (arg01, arg10, 0))
13024 return fold_build2_loc (loc, code, type, arg00,
13025 fold_convert_loc (loc, TREE_TYPE (arg00),
13026 arg11));
13027 if (operand_equal_p (arg00, arg11, 0))
13028 return fold_build2_loc (loc, code, type, arg01,
13029 fold_convert_loc (loc, TREE_TYPE (arg01),
13030 arg10));
13031 if (operand_equal_p (arg00, arg10, 0))
13032 return fold_build2_loc (loc, code, type, arg01,
13033 fold_convert_loc (loc, TREE_TYPE (arg01),
13034 arg11));
13035
13036 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13037 if (TREE_CODE (arg01) == INTEGER_CST
13038 && TREE_CODE (arg11) == INTEGER_CST)
13039 {
13040 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13041 fold_convert_loc (loc, itype, arg11));
13042 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13043 return fold_build2_loc (loc, code, type, tem,
13044 fold_convert_loc (loc, itype, arg10));
13045 }
13046 }
13047
13048 /* Attempt to simplify equality/inequality comparisons of complex
13049 values. Only lower the comparison if the result is known or
13050 can be simplified to a single scalar comparison. */
13051 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13052 || TREE_CODE (arg0) == COMPLEX_CST)
13053 && (TREE_CODE (arg1) == COMPLEX_EXPR
13054 || TREE_CODE (arg1) == COMPLEX_CST))
13055 {
13056 tree real0, imag0, real1, imag1;
13057 tree rcond, icond;
13058
13059 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13060 {
13061 real0 = TREE_OPERAND (arg0, 0);
13062 imag0 = TREE_OPERAND (arg0, 1);
13063 }
13064 else
13065 {
13066 real0 = TREE_REALPART (arg0);
13067 imag0 = TREE_IMAGPART (arg0);
13068 }
13069
13070 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13071 {
13072 real1 = TREE_OPERAND (arg1, 0);
13073 imag1 = TREE_OPERAND (arg1, 1);
13074 }
13075 else
13076 {
13077 real1 = TREE_REALPART (arg1);
13078 imag1 = TREE_IMAGPART (arg1);
13079 }
13080
13081 rcond = fold_binary_loc (loc, code, type, real0, real1);
13082 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13083 {
13084 if (integer_zerop (rcond))
13085 {
13086 if (code == EQ_EXPR)
13087 return omit_two_operands_loc (loc, type, boolean_false_node,
13088 imag0, imag1);
13089 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13090 }
13091 else
13092 {
13093 if (code == NE_EXPR)
13094 return omit_two_operands_loc (loc, type, boolean_true_node,
13095 imag0, imag1);
13096 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13097 }
13098 }
13099
13100 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13101 if (icond && TREE_CODE (icond) == INTEGER_CST)
13102 {
13103 if (integer_zerop (icond))
13104 {
13105 if (code == EQ_EXPR)
13106 return omit_two_operands_loc (loc, type, boolean_false_node,
13107 real0, real1);
13108 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13109 }
13110 else
13111 {
13112 if (code == NE_EXPR)
13113 return omit_two_operands_loc (loc, type, boolean_true_node,
13114 real0, real1);
13115 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13116 }
13117 }
13118 }
13119
13120 return NULL_TREE;
13121
13122 case LT_EXPR:
13123 case GT_EXPR:
13124 case LE_EXPR:
13125 case GE_EXPR:
13126 tem = fold_comparison (loc, code, type, op0, op1);
13127 if (tem != NULL_TREE)
13128 return tem;
13129
13130 /* Transform comparisons of the form X +- C CMP X. */
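      /* E.g. for signed x, where overflow is undefined, x + 1 > x folds
	 to true; for unsigned types these folds are skipped because
	 wraparound is well-defined.  */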
13131 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13132 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13133 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13134 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13135 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13136 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13137 {
13138 tree arg01 = TREE_OPERAND (arg0, 1);
13139 enum tree_code code0 = TREE_CODE (arg0);
13140 int is_positive;
13141
13142 if (TREE_CODE (arg01) == REAL_CST)
13143 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13144 else
13145 is_positive = tree_int_cst_sgn (arg01);
13146
13147 /* (X - c) > X becomes false. */
13148 if (code == GT_EXPR
13149 && ((code0 == MINUS_EXPR && is_positive >= 0)
13150 || (code0 == PLUS_EXPR && is_positive <= 0)))
13151 {
13152 if (TREE_CODE (arg01) == INTEGER_CST
13153 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13154 fold_overflow_warning (("assuming signed overflow does not "
13155 "occur when assuming that (X - c) > X "
13156 "is always false"),
13157 WARN_STRICT_OVERFLOW_ALL);
13158 return constant_boolean_node (0, type);
13159 }
13160
13161 /* Likewise (X + c) < X becomes false. */
13162 if (code == LT_EXPR
13163 && ((code0 == PLUS_EXPR && is_positive >= 0)
13164 || (code0 == MINUS_EXPR && is_positive <= 0)))
13165 {
13166 if (TREE_CODE (arg01) == INTEGER_CST
13167 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13168 fold_overflow_warning (("assuming signed overflow does not "
13169 "occur when assuming that "
13170 "(X + c) < X is always false"),
13171 WARN_STRICT_OVERFLOW_ALL);
13172 return constant_boolean_node (0, type);
13173 }
13174
13175 /* Convert (X - c) <= X to true. */
13176 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13177 && code == LE_EXPR
13178 && ((code0 == MINUS_EXPR && is_positive >= 0)
13179 || (code0 == PLUS_EXPR && is_positive <= 0)))
13180 {
13181 if (TREE_CODE (arg01) == INTEGER_CST
13182 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13183 fold_overflow_warning (("assuming signed overflow does not "
13184 "occur when assuming that "
13185 "(X - c) <= X is always true"),
13186 WARN_STRICT_OVERFLOW_ALL);
13187 return constant_boolean_node (1, type);
13188 }
13189
13190 /* Convert (X + c) >= X to true. */
13191 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13192 && code == GE_EXPR
13193 && ((code0 == PLUS_EXPR && is_positive >= 0)
13194 || (code0 == MINUS_EXPR && is_positive <= 0)))
13195 {
13196 if (TREE_CODE (arg01) == INTEGER_CST
13197 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13198 fold_overflow_warning (("assuming signed overflow does not "
13199 "occur when assuming that "
13200 "(X + c) >= X is always true"),
13201 WARN_STRICT_OVERFLOW_ALL);
13202 return constant_boolean_node (1, type);
13203 }
13204
13205 if (TREE_CODE (arg01) == INTEGER_CST)
13206 {
13207 /* Convert X + c > X and X - c < X to true for integers. */
13208 if (code == GT_EXPR
13209 && ((code0 == PLUS_EXPR && is_positive > 0)
13210 || (code0 == MINUS_EXPR && is_positive < 0)))
13211 {
13212 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13213 fold_overflow_warning (("assuming signed overflow does "
13214 "not occur when assuming that "
13215 "(X + c) > X is always true"),
13216 WARN_STRICT_OVERFLOW_ALL);
13217 return constant_boolean_node (1, type);
13218 }
13219
13220 if (code == LT_EXPR
13221 && ((code0 == MINUS_EXPR && is_positive > 0)
13222 || (code0 == PLUS_EXPR && is_positive < 0)))
13223 {
13224 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13225 fold_overflow_warning (("assuming signed overflow does "
13226 "not occur when assuming that "
13227 "(X - c) < X is always true"),
13228 WARN_STRICT_OVERFLOW_ALL);
13229 return constant_boolean_node (1, type);
13230 }
13231
13232 /* Convert X + c <= X and X - c >= X to false for integers. */
13233 if (code == LE_EXPR
13234 && ((code0 == PLUS_EXPR && is_positive > 0)
13235 || (code0 == MINUS_EXPR && is_positive < 0)))
13236 {
13237 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13238 fold_overflow_warning (("assuming signed overflow does "
13239 "not occur when assuming that "
13240 "(X + c) <= X is always false"),
13241 WARN_STRICT_OVERFLOW_ALL);
13242 return constant_boolean_node (0, type);
13243 }
13244
13245 if (code == GE_EXPR
13246 && ((code0 == MINUS_EXPR && is_positive > 0)
13247 || (code0 == PLUS_EXPR && is_positive < 0)))
13248 {
13249 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13250 fold_overflow_warning (("assuming signed overflow does "
13251 "not occur when assuming that "
13252 "(X - c) >= X is always false"),
13253 WARN_STRICT_OVERFLOW_ALL);
13254 return constant_boolean_node (0, type);
13255 }
13256 }
13257 }
13258
13259 /* Comparisons with the highest or lowest possible integer of
13260 the specified precision will have known values. */
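/* For instance, if ARG1 is the maximum of a signed 8-bit type (127),
   the switches below fold X > 127 to false, X >= 127 to X == 127,
   X <= 127 to true, and X < 127 to X != 127.  */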
13261 {
13262 tree arg1_type = TREE_TYPE (arg1);
13263 unsigned int width = TYPE_PRECISION (arg1_type);
13264
13265 if (TREE_CODE (arg1) == INTEGER_CST
13266 && width <= 2 * HOST_BITS_PER_WIDE_INT
13267 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13268 {
13269 HOST_WIDE_INT signed_max_hi;
13270 unsigned HOST_WIDE_INT signed_max_lo;
13271 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13272
13273 if (width <= HOST_BITS_PER_WIDE_INT)
13274 {
13275 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13276 - 1;
13277 signed_max_hi = 0;
13278 max_hi = 0;
13279
13280 if (TYPE_UNSIGNED (arg1_type))
13281 {
13282 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13283 min_lo = 0;
13284 min_hi = 0;
13285 }
13286 else
13287 {
13288 max_lo = signed_max_lo;
13289 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13290 min_hi = -1;
13291 }
13292 }
13293 else
13294 {
13295 width -= HOST_BITS_PER_WIDE_INT;
13296 signed_max_lo = -1;
13297 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13298 - 1;
13299 max_lo = -1;
13300 min_lo = 0;
13301
13302 if (TYPE_UNSIGNED (arg1_type))
13303 {
13304 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13305 min_hi = 0;
13306 }
13307 else
13308 {
13309 max_hi = signed_max_hi;
13310 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13311 }
13312 }
13313
13314 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13315 && TREE_INT_CST_LOW (arg1) == max_lo)
13316 switch (code)
13317 {
13318 case GT_EXPR:
13319 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13320
13321 case GE_EXPR:
13322 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13323
13324 case LE_EXPR:
13325 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13326
13327 case LT_EXPR:
13328 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13329
13330 /* The GE_EXPR and LT_EXPR cases above are not normally
13331 reached because of previous transformations. */
13332
13333 default:
13334 break;
13335 }
13336 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13337 == max_hi
13338 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13339 switch (code)
13340 {
13341 case GT_EXPR:
13342 arg1 = const_binop (PLUS_EXPR, arg1,
13343 build_int_cst (TREE_TYPE (arg1), 1));
13344 return fold_build2_loc (loc, EQ_EXPR, type,
13345 fold_convert_loc (loc,
13346 TREE_TYPE (arg1), arg0),
13347 arg1);
13348 case LE_EXPR:
13349 arg1 = const_binop (PLUS_EXPR, arg1,
13350 build_int_cst (TREE_TYPE (arg1), 1));
13351 return fold_build2_loc (loc, NE_EXPR, type,
13352 fold_convert_loc (loc, TREE_TYPE (arg1),
13353 arg0),
13354 arg1);
13355 default:
13356 break;
13357 }
13358 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13359 == min_hi
13360 && TREE_INT_CST_LOW (arg1) == min_lo)
13361 switch (code)
13362 {
13363 case LT_EXPR:
13364 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13365
13366 case LE_EXPR:
13367 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13368
13369 case GE_EXPR:
13370 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13371
13372 case GT_EXPR:
13373 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13374
13375 default:
13376 break;
13377 }
13378 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13379 == min_hi
13380 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13381 switch (code)
13382 {
13383 case GE_EXPR:
13384 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13385 return fold_build2_loc (loc, NE_EXPR, type,
13386 fold_convert_loc (loc,
13387 TREE_TYPE (arg1), arg0),
13388 arg1);
13389 case LT_EXPR:
13390 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13391 return fold_build2_loc (loc, EQ_EXPR, type,
13392 fold_convert_loc (loc, TREE_TYPE (arg1),
13393 arg0),
13394 arg1);
13395 default:
13396 break;
13397 }
13398
13399 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13400 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13401 && TYPE_UNSIGNED (arg1_type)
13402 /* We will flip the signedness of the comparison operator
13403 associated with the mode of arg1, so the sign bit is
13404 specified by this mode. Check that arg1 is the signed
13405 max associated with this sign bit. */
13406 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13407 /* signed_type does not work on pointer types. */
13408 && INTEGRAL_TYPE_P (arg1_type))
13409 {
13410 /* The following case also applies to X < signed_max+1
13411 and X >= signed_max+1 because of previous transformations. */
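/* E.g. for a 32-bit unsigned X, X <= 0x7fffffff becomes
   (int) X >= 0 and X > 0x7fffffff becomes (int) X < 0.  */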
13412 if (code == LE_EXPR || code == GT_EXPR)
13413 {
13414 tree st;
13415 st = signed_type_for (TREE_TYPE (arg1));
13416 return fold_build2_loc (loc,
13417 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13418 type, fold_convert_loc (loc, st, arg0),
13419 build_int_cst (st, 0));
13420 }
13421 }
13422 }
13423 }
13424
13425 /* If we are comparing an ABS_EXPR with a constant, we can
13426 convert all the cases into explicit comparisons, but they may
13427 well not be faster than doing the ABS and one comparison.
13428 But ABS (X) <= C is a range comparison, which becomes a subtraction
13429 and a comparison, and is probably faster. */
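/* E.g. ABS (X) <= 5 becomes X >= -5 && X <= 5; the checks below
   ensure that negating the constant does not itself overflow.  */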
13430 if (code == LE_EXPR
13431 && TREE_CODE (arg1) == INTEGER_CST
13432 && TREE_CODE (arg0) == ABS_EXPR
13433 && ! TREE_SIDE_EFFECTS (arg0)
13434 && (0 != (tem = negate_expr (arg1)))
13435 && TREE_CODE (tem) == INTEGER_CST
13436 && !TREE_OVERFLOW (tem))
13437 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13438 build2 (GE_EXPR, type,
13439 TREE_OPERAND (arg0, 0), tem),
13440 build2 (LE_EXPR, type,
13441 TREE_OPERAND (arg0, 0), arg1));
13442
13443 /* Convert ABS_EXPR<x> >= 0 to true. */
13444 strict_overflow_p = false;
13445 if (code == GE_EXPR
13446 && (integer_zerop (arg1)
13447 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13448 && real_zerop (arg1)))
13449 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13450 {
13451 if (strict_overflow_p)
13452 fold_overflow_warning (("assuming signed overflow does not occur "
13453 "when simplifying comparison of "
13454 "absolute value and zero"),
13455 WARN_STRICT_OVERFLOW_CONDITIONAL);
13456 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13457 }
13458
13459 /* Convert ABS_EXPR<x> < 0 to false. */
13460 strict_overflow_p = false;
13461 if (code == LT_EXPR
13462 && (integer_zerop (arg1) || real_zerop (arg1))
13463 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13464 {
13465 if (strict_overflow_p)
13466 fold_overflow_warning (("assuming signed overflow does not occur "
13467 "when simplifying comparison of "
13468 "absolute value and zero"),
13469 WARN_STRICT_OVERFLOW_CONDITIONAL);
13470 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13471 }
13472
13473 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13474 and similarly for >= into !=. */
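/* E.g. with X == 19 and Y == 4, X < (1 << Y) is 19 < 16, which is
   false, and (X >> Y) == 0 is 1 == 0, likewise false.  */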
13475 if ((code == LT_EXPR || code == GE_EXPR)
13476 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13477 && TREE_CODE (arg1) == LSHIFT_EXPR
13478 && integer_onep (TREE_OPERAND (arg1, 0)))
13479 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13480 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13481 TREE_OPERAND (arg1, 1)),
13482 build_int_cst (TREE_TYPE (arg0), 0));
13483
13484 if ((code == LT_EXPR || code == GE_EXPR)
13485 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13486 && CONVERT_EXPR_P (arg1)
13487 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13488 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13489 {
13490 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13491 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13492 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13493 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13494 build_int_cst (TREE_TYPE (arg0), 0));
13495 }
13496
13497 return NULL_TREE;
13498
13499 case UNORDERED_EXPR:
13500 case ORDERED_EXPR:
13501 case UNLT_EXPR:
13502 case UNLE_EXPR:
13503 case UNGT_EXPR:
13504 case UNGE_EXPR:
13505 case UNEQ_EXPR:
13506 case LTGT_EXPR:
13507 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13508 {
13509 t1 = fold_relational_const (code, type, arg0, arg1);
13510 if (t1 != NULL_TREE)
13511 return t1;
13512 }
13513
13514 /* If the first operand is NaN, the result is constant. */
13515 if (TREE_CODE (arg0) == REAL_CST
13516 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13517 && (code != LTGT_EXPR || ! flag_trapping_math))
13518 {
13519 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13520 ? integer_zero_node
13521 : integer_one_node;
13522 return omit_one_operand_loc (loc, type, t1, arg1);
13523 }
13524
13525 /* If the second operand is NaN, the result is constant. */
13526 if (TREE_CODE (arg1) == REAL_CST
13527 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13528 && (code != LTGT_EXPR || ! flag_trapping_math))
13529 {
13530 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13531 ? integer_zero_node
13532 : integer_one_node;
13533 return omit_one_operand_loc (loc, type, t1, arg0);
13534 }
13535
13536 /* Simplify unordered comparison of something with itself. */
13537 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13538 && operand_equal_p (arg0, arg1, 0))
13539 return constant_boolean_node (1, type);
13540
13541 if (code == LTGT_EXPR
13542 && !flag_trapping_math
13543 && operand_equal_p (arg0, arg1, 0))
13544 return constant_boolean_node (0, type);
13545
13546 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
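/* This is safe because a widening floating-point conversion is exact,
   so comparing the narrower values gives the same result as comparing
   the widened ones.  */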
13547 {
13548 tree targ0 = strip_float_extensions (arg0);
13549 tree targ1 = strip_float_extensions (arg1);
13550 tree newtype = TREE_TYPE (targ0);
13551
13552 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13553 newtype = TREE_TYPE (targ1);
13554
13555 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13556 return fold_build2_loc (loc, code, type,
13557 fold_convert_loc (loc, newtype, targ0),
13558 fold_convert_loc (loc, newtype, targ1));
13559 }
13560
13561 return NULL_TREE;
13562
13563 case COMPOUND_EXPR:
13564 /* When pedantic, a compound expression can be neither an lvalue
13565 nor an integer constant expression. */
13566 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13567 return NULL_TREE;
13568 /* Don't let (0, 0) be a null pointer constant. */
13569 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13570 : fold_convert_loc (loc, type, arg1);
13571 return pedantic_non_lvalue_loc (loc, tem);
13572
13573 case COMPLEX_EXPR:
13574 if ((TREE_CODE (arg0) == REAL_CST
13575 && TREE_CODE (arg1) == REAL_CST)
13576 || (TREE_CODE (arg0) == INTEGER_CST
13577 && TREE_CODE (arg1) == INTEGER_CST))
13578 return build_complex (type, arg0, arg1);
13579 if (TREE_CODE (arg0) == REALPART_EXPR
13580 && TREE_CODE (arg1) == IMAGPART_EXPR
13581 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13582 && operand_equal_p (TREE_OPERAND (arg0, 0),
13583 TREE_OPERAND (arg1, 0), 0))
13584 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13585 TREE_OPERAND (arg1, 0));
13586 return NULL_TREE;
13587
13588 case ASSERT_EXPR:
13589 /* An ASSERT_EXPR should never be passed to fold_binary. */
13590 gcc_unreachable ();
13591
13592 case VEC_PACK_TRUNC_EXPR:
13593 case VEC_PACK_FIX_TRUNC_EXPR:
13594 {
13595 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13596 tree *elts;
13597
13598 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13599 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13600 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13601 return NULL_TREE;
13602
13603 elts = XALLOCAVEC (tree, nelts);
13604 if (!vec_cst_ctor_to_array (arg0, elts)
13605 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13606 return NULL_TREE;
13607
13608 for (i = 0; i < nelts; i++)
13609 {
13610 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13611 ? NOP_EXPR : FIX_TRUNC_EXPR,
13612 TREE_TYPE (type), elts[i]);
13613 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13614 return NULL_TREE;
13615 }
13616
13617 return build_vector (type, elts);
13618 }
13619
13620 case VEC_WIDEN_MULT_LO_EXPR:
13621 case VEC_WIDEN_MULT_HI_EXPR:
13622 {
13623 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13624 tree *elts;
13625
13626 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13627 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13628 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13629 return NULL_TREE;
13630
13631 elts = XALLOCAVEC (tree, nelts * 4);
13632 if (!vec_cst_ctor_to_array (arg0, elts)
13633 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13634 return NULL_TREE;
13635
13636 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_WIDEN_MULT_LO_EXPR))
13637 elts += nelts;
13638
13639 for (i = 0; i < nelts; i++)
13640 {
13641 elts[i] = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[i]);
13642 elts[i + nelts * 2]
13643 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
13644 elts[i + nelts * 2]);
13645 if (elts[i] == NULL_TREE || elts[i + nelts * 2] == NULL_TREE)
13646 return NULL_TREE;
13647 elts[i] = const_binop (MULT_EXPR, elts[i], elts[i + nelts * 2]);
13648 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13649 return NULL_TREE;
13650 }
13651
13652 return build_vector (type, elts);
13653 }
13654
13655 default:
13656 return NULL_TREE;
13657 } /* switch (code) */
13658 }
13659
13660 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13661 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13662 of GOTO_EXPR. */
13663
13664 static tree
13665 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13666 {
13667 switch (TREE_CODE (*tp))
13668 {
13669 case LABEL_EXPR:
13670 return *tp;
13671
13672 case GOTO_EXPR:
13673 *walk_subtrees = 0;
13674
13675 /* ... fall through ... */
13676
13677 default:
13678 return NULL_TREE;
13679 }
13680 }
13681
13682 /* Return whether the sub-tree ST contains a label which is accessible from
13683 outside the sub-tree. */
13684
13685 static bool
13686 contains_label_p (tree st)
13687 {
13688 return
13689 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13690 }
13691
13692 /* Fold a ternary expression of code CODE and type TYPE with operands
13693 OP0, OP1, and OP2. Return the folded expression if folding is
13694 successful. Otherwise, return NULL_TREE. */
13695
13696 tree
13697 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13698 tree op0, tree op1, tree op2)
13699 {
13700 tree tem;
13701 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13702 enum tree_code_class kind = TREE_CODE_CLASS (code);
13703
13704 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13705 && TREE_CODE_LENGTH (code) == 3);
13706
13707 /* Strip any conversions that don't change the mode. This is safe
13708 for every expression, except for a comparison expression because
13709 its signedness is derived from its operands. So, in the latter
13710 case, only strip conversions that don't change the signedness.
13711
13712 Note that this is done as an internal manipulation within the
13713 constant folder, in order to find the simplest representation of
13714 the arguments so that their form can be studied. In any case,
13715 the appropriate type conversions should be put back in the tree
13716 that will get out of the constant folder. */
13717 if (op0)
13718 {
13719 arg0 = op0;
13720 STRIP_NOPS (arg0);
13721 }
13722
13723 if (op1)
13724 {
13725 arg1 = op1;
13726 STRIP_NOPS (arg1);
13727 }
13728
13729 if (op2)
13730 {
13731 arg2 = op2;
13732 STRIP_NOPS (arg2);
13733 }
13734
13735 switch (code)
13736 {
13737 case COMPONENT_REF:
13738 if (TREE_CODE (arg0) == CONSTRUCTOR
13739 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13740 {
13741 unsigned HOST_WIDE_INT idx;
13742 tree field, value;
13743 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13744 if (field == arg1)
13745 return value;
13746 }
13747 return NULL_TREE;
13748
13749 case COND_EXPR:
13750 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13751 so all simple results must be passed through pedantic_non_lvalue. */
13752 if (TREE_CODE (arg0) == INTEGER_CST)
13753 {
13754 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13755 tem = integer_zerop (arg0) ? op2 : op1;
13756 /* Only optimize constant conditions when the selected branch
13757 has the same type as the COND_EXPR. This avoids optimizing
13758 away "c ? x : throw", where the throw has a void type.
13759 Avoid throwing away the operand that contains a label. */
13760 if ((!TREE_SIDE_EFFECTS (unused_op)
13761 || !contains_label_p (unused_op))
13762 && (! VOID_TYPE_P (TREE_TYPE (tem))
13763 || VOID_TYPE_P (type)))
13764 return pedantic_non_lvalue_loc (loc, tem);
13765 return NULL_TREE;
13766 }
13767 if (operand_equal_p (arg1, op2, 0))
13768 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13769
13770 /* If we have A op B ? A : C, we may be able to convert this to a
13771 simpler expression, depending on the operation and the values
13772 of B and C. Signed zeros prevent all of these transformations,
13773 for reasons given above each one.
13774
13775 Also try swapping the arguments and inverting the conditional. */
13776 if (COMPARISON_CLASS_P (arg0)
13777 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13778 arg1, TREE_OPERAND (arg0, 1))
13779 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13780 {
13781 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13782 if (tem)
13783 return tem;
13784 }
13785
13786 if (COMPARISON_CLASS_P (arg0)
13787 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13788 op2,
13789 TREE_OPERAND (arg0, 1))
13790 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13791 {
13792 location_t loc0 = expr_location_or (arg0, loc);
13793 tem = fold_truth_not_expr (loc0, arg0);
13794 if (tem && COMPARISON_CLASS_P (tem))
13795 {
13796 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13797 if (tem)
13798 return tem;
13799 }
13800 }
13801
13802 /* If the second operand is simpler than the third, swap them
13803 since that produces better jump optimization results. */
13804 if (truth_value_p (TREE_CODE (arg0))
13805 && tree_swap_operands_p (op1, op2, false))
13806 {
13807 location_t loc0 = expr_location_or (arg0, loc);
13808 /* See if this can be inverted. If it can't, possibly because
13809 it was a floating-point inequality comparison, don't do
13810 anything. */
13811 tem = fold_truth_not_expr (loc0, arg0);
13812 if (tem)
13813 return fold_build3_loc (loc, code, type, tem, op2, op1);
13814 }
13815
13816 /* Convert A ? 1 : 0 to simply A. */
13817 if (integer_onep (op1)
13818 && integer_zerop (op2)
13819 /* If we try to convert OP0 to our type, the
13820 call to fold will try to move the conversion inside
13821 a COND, which will recurse. In that case, the COND_EXPR
13822 is probably the best choice, so leave it alone. */
13823 && type == TREE_TYPE (arg0))
13824 return pedantic_non_lvalue_loc (loc, arg0);
13825
13826 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13827 over COND_EXPR in cases such as floating point comparisons. */
13828 if (integer_zerop (op1)
13829 && integer_onep (op2)
13830 && truth_value_p (TREE_CODE (arg0)))
13831 return pedantic_non_lvalue_loc (loc,
13832 fold_convert_loc (loc, type,
13833 invert_truthvalue_loc (loc,
13834 arg0)));
13835
13836 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
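/* E.g. for a 32-bit int A, A < 0 ? 0x80000000 : 0 folds to
   A & 0x80000000, i.e. the sign bit of A.  */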
13837 if (TREE_CODE (arg0) == LT_EXPR
13838 && integer_zerop (TREE_OPERAND (arg0, 1))
13839 && integer_zerop (op2)
13840 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13841 {
13842 /* sign_bit_p only checks ARG1 bits within A's precision.
13843 If <sign bit of A> has a wider type than A, bits outside
13844 of A's precision in <sign bit of A> need to be checked.
13845 If they are all 0, this optimization needs to be done
13846 in unsigned A's type; if they are all 1, in signed A's type;
13847 otherwise this can't be done. */
13848 if (TYPE_PRECISION (TREE_TYPE (tem))
13849 < TYPE_PRECISION (TREE_TYPE (arg1))
13850 && TYPE_PRECISION (TREE_TYPE (tem))
13851 < TYPE_PRECISION (type))
13852 {
13853 unsigned HOST_WIDE_INT mask_lo;
13854 HOST_WIDE_INT mask_hi;
13855 int inner_width, outer_width;
13856 tree tem_type;
13857
13858 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13859 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13860 if (outer_width > TYPE_PRECISION (type))
13861 outer_width = TYPE_PRECISION (type);
13862
13863 if (outer_width > HOST_BITS_PER_WIDE_INT)
13864 {
13865 mask_hi = ((unsigned HOST_WIDE_INT) -1
13866 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13867 mask_lo = -1;
13868 }
13869 else
13870 {
13871 mask_hi = 0;
13872 mask_lo = ((unsigned HOST_WIDE_INT) -1
13873 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13874 }
13875 if (inner_width > HOST_BITS_PER_WIDE_INT)
13876 {
13877 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13878 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13879 mask_lo = 0;
13880 }
13881 else
13882 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13883 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13884
13885 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13886 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13887 {
13888 tem_type = signed_type_for (TREE_TYPE (tem));
13889 tem = fold_convert_loc (loc, tem_type, tem);
13890 }
13891 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13892 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13893 {
13894 tem_type = unsigned_type_for (TREE_TYPE (tem));
13895 tem = fold_convert_loc (loc, tem_type, tem);
13896 }
13897 else
13898 tem = NULL;
13899 }
13900
13901 if (tem)
13902 return
13903 fold_convert_loc (loc, type,
13904 fold_build2_loc (loc, BIT_AND_EXPR,
13905 TREE_TYPE (tem), tem,
13906 fold_convert_loc (loc,
13907 TREE_TYPE (tem),
13908 arg1)));
13909 }
13910
13911 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13912 already handled above. */
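/* E.g. (A >> 3) & 1 ? 8 : 0 folds to A & 8, since both forms test
   bit 3 of A.  */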
13913 if (TREE_CODE (arg0) == BIT_AND_EXPR
13914 && integer_onep (TREE_OPERAND (arg0, 1))
13915 && integer_zerop (op2)
13916 && integer_pow2p (arg1))
13917 {
13918 tree tem = TREE_OPERAND (arg0, 0);
13919 STRIP_NOPS (tem);
13920 if (TREE_CODE (tem) == RSHIFT_EXPR
13921 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13922 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13923 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13924 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13925 TREE_OPERAND (tem, 0), arg1);
13926 }
13927
13928 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13929 is probably obsolete because the first operand should be a
13930 truth value (that's why we have the two cases above), but let's
13931 leave it in until we can confirm this for all front-ends. */
13932 if (integer_zerop (op2)
13933 && TREE_CODE (arg0) == NE_EXPR
13934 && integer_zerop (TREE_OPERAND (arg0, 1))
13935 && integer_pow2p (arg1)
13936 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13937 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13938 arg1, OEP_ONLY_CONST))
13939 return pedantic_non_lvalue_loc (loc,
13940 fold_convert_loc (loc, type,
13941 TREE_OPERAND (arg0, 0)));
13942
13943 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13944 if (integer_zerop (op2)
13945 && truth_value_p (TREE_CODE (arg0))
13946 && truth_value_p (TREE_CODE (arg1)))
13947 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13948 fold_convert_loc (loc, type, arg0),
13949 arg1);
13950
13951 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13952 if (integer_onep (op2)
13953 && truth_value_p (TREE_CODE (arg0))
13954 && truth_value_p (TREE_CODE (arg1)))
13955 {
13956 location_t loc0 = expr_location_or (arg0, loc);
13957 /* Only perform transformation if ARG0 is easily inverted. */
13958 tem = fold_truth_not_expr (loc0, arg0);
13959 if (tem)
13960 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13961 fold_convert_loc (loc, type, tem),
13962 arg1);
13963 }
13964
13965 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13966 if (integer_zerop (arg1)
13967 && truth_value_p (TREE_CODE (arg0))
13968 && truth_value_p (TREE_CODE (op2)))
13969 {
13970 location_t loc0 = expr_location_or (arg0, loc);
13971 /* Only perform transformation if ARG0 is easily inverted. */
13972 tem = fold_truth_not_expr (loc0, arg0);
13973 if (tem)
13974 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13975 fold_convert_loc (loc, type, tem),
13976 op2);
13977 }
13978
13979 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13980 if (integer_onep (arg1)
13981 && truth_value_p (TREE_CODE (arg0))
13982 && truth_value_p (TREE_CODE (op2)))
13983 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13984 fold_convert_loc (loc, type, arg0),
13985 op2);
13986
13987 return NULL_TREE;
13988
13989 case CALL_EXPR:
13990 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13991 of fold_ternary on them. */
13992 gcc_unreachable ();
13993
13994 case BIT_FIELD_REF:
13995 if ((TREE_CODE (arg0) == VECTOR_CST
13996 || TREE_CODE (arg0) == CONSTRUCTOR)
13997 && (type == TREE_TYPE (TREE_TYPE (arg0))
13998 || (TREE_CODE (type) == VECTOR_TYPE
13999 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14000 {
14001 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14002 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14003 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14004 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14005
14006 if (n != 0
14007 && (idx % width) == 0
14008 && (n % width) == 0
14009 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14010 {
14011 idx = idx / width;
14012 n = n / width;
14013 if (TREE_CODE (type) == VECTOR_TYPE)
14014 {
14015 if (TREE_CODE (arg0) == VECTOR_CST)
14016 {
14017 tree *vals = XALLOCAVEC (tree, n);
14018 unsigned i;
14019 for (i = 0; i < n; ++i)
14020 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14021 return build_vector (type, vals);
14022 }
14023 else
14024 {
14025 VEC(constructor_elt, gc) *vals;
14026 unsigned i;
14027 if (CONSTRUCTOR_NELTS (arg0) == 0)
14028 return build_constructor (type, NULL);
14029 vals = VEC_alloc (constructor_elt, gc, n);
14030 for (i = 0; i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14031 ++i)
14032 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14033 CONSTRUCTOR_ELT
14034 (arg0, idx + i)->value);
14035 return build_constructor (type, vals);
14036 }
14037 }
14038 else if (n == 1)
14039 {
14040 if (TREE_CODE (arg0) == VECTOR_CST)
14041 return VECTOR_CST_ELT (arg0, idx);
14042 else if (idx < CONSTRUCTOR_NELTS (arg0))
14043 return CONSTRUCTOR_ELT (arg0, idx)->value;
14044 return build_zero_cst (type);
14045 }
14046 }
14047 }
14048
14049 /* A bit-field-ref that referenced the full argument can be stripped. */
14050 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14051 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14052 && integer_zerop (op2))
14053 return fold_convert_loc (loc, type, arg0);
14054
14055 /* On constants we can use native encode/interpret to constant
14056 fold (nearly) all BIT_FIELD_REFs. */
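/* E.g. a BIT_FIELD_REF selecting 32 bits of a 64-bit INTEGER_CST can
   be folded by encoding the constant into an 8-byte buffer in target
   byte order and reinterpreting the four bytes at the selected offset
   as the result type.  */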
14057 if (CONSTANT_CLASS_P (arg0)
14058 && can_native_interpret_type_p (type)
14059 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14060 /* This limitation should not be necessary; we just need to
14061 round this up to the mode size. */
14062 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14063 /* Need bit-shifting of the buffer to relax the following. */
14064 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14065 {
14066 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14067 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14068 unsigned HOST_WIDE_INT clen;
14069 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14070 /* ??? We cannot tell native_encode_expr to start at an
14071 arbitrary byte, so limit ourselves to a reasonable
14072 amount of work. */
14073 if (clen <= 4096)
14074 {
14075 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14076 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14077 if (len > 0
14078 && len * BITS_PER_UNIT >= bitpos + bitsize)
14079 {
14080 tree v = native_interpret_expr (type,
14081 b + bitpos / BITS_PER_UNIT,
14082 bitsize / BITS_PER_UNIT);
14083 if (v)
14084 return v;
14085 }
14086 }
14087 }
14088
14089 return NULL_TREE;
14090
14091 case FMA_EXPR:
14092 /* For integers we can decompose the FMA if possible. */
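/* E.g. FMA_EXPR <2, 3, c> folds to 6 + c below, and FMA_EXPR <a, b, 0>
   folds to a * b.  */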
14093 if (TREE_CODE (arg0) == INTEGER_CST
14094 && TREE_CODE (arg1) == INTEGER_CST)
14095 return fold_build2_loc (loc, PLUS_EXPR, type,
14096 const_binop (MULT_EXPR, arg0, arg1), arg2);
14097 if (integer_zerop (arg2))
14098 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14099
14100 return fold_fma (loc, type, arg0, arg1, arg2);
14101
14102 case VEC_PERM_EXPR:
14103 if (TREE_CODE (arg2) == VECTOR_CST)
14104 {
14105 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14106 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14107 tree t;
14108 bool need_mask_canon = false;
14109
14110 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14111 for (i = 0; i < nelts; i++)
14112 {
14113 tree val = VECTOR_CST_ELT (arg2, i);
14114 if (TREE_CODE (val) != INTEGER_CST)
14115 return NULL_TREE;
14116
14117 sel[i] = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
14118 if (TREE_INT_CST_HIGH (val)
14119 || ((unsigned HOST_WIDE_INT)
14120 TREE_INT_CST_LOW (val) != sel[i]))
14121 need_mask_canon = true;
14122 }
14123
14124 if ((TREE_CODE (arg0) == VECTOR_CST
14125 || TREE_CODE (arg0) == CONSTRUCTOR)
14126 && (TREE_CODE (arg1) == VECTOR_CST
14127 || TREE_CODE (arg1) == CONSTRUCTOR))
14128 {
14129 t = fold_vec_perm (type, arg0, arg1, sel);
14130 if (t != NULL_TREE)
14131 return t;
14132 }
14133
14134 if (need_mask_canon && arg2 == op2)
14135 {
14136 tree *tsel = XALLOCAVEC (tree, nelts);
14137 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14138 for (i = 0; i < nelts; i++)
14139 tsel[i] = build_int_cst (eltype, sel[nelts - i - 1]);
14140 t = build_vector (TREE_TYPE (arg2), tsel);
14141 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, t);
14142 }
14143 }
14144 return NULL_TREE;
14145
14146 default:
14147 return NULL_TREE;
14148 } /* switch (code) */
14149 }
14150
14151 /* Perform constant folding and related simplification of EXPR.
14152 The related simplifications include x*1 => x, x*0 => 0, etc.,
14153 and application of the associative law.
14154 NOP_EXPR conversions may be removed freely (as long as we
14155 are careful not to change the type of the overall expression).
14156 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14157 but we can constant-fold them if they have constant operands. */
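/* For instance, folding a PLUS_EXPR of the INTEGER_CSTs 1 and 2 yields
   the INTEGER_CST 3, and folding a MULT_EXPR of X and 1 yields X.  */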
14158
14159 #ifdef ENABLE_FOLD_CHECKING
14160 # define fold(x) fold_1 (x)
14161 static tree fold_1 (tree);
14162 static
14163 #endif
14164 tree
14165 fold (tree expr)
14166 {
14167 const tree t = expr;
14168 enum tree_code code = TREE_CODE (t);
14169 enum tree_code_class kind = TREE_CODE_CLASS (code);
14170 tree tem;
14171 location_t loc = EXPR_LOCATION (expr);
14172
14173 /* Return right away if a constant. */
14174 if (kind == tcc_constant)
14175 return t;
14176
14177 /* CALL_EXPR-like objects with variable numbers of operands are
14178 treated specially. */
14179 if (kind == tcc_vl_exp)
14180 {
14181 if (code == CALL_EXPR)
14182 {
14183 tem = fold_call_expr (loc, expr, false);
14184 return tem ? tem : expr;
14185 }
14186 return expr;
14187 }
14188
14189 if (IS_EXPR_CODE_CLASS (kind))
14190 {
14191 tree type = TREE_TYPE (t);
14192 tree op0, op1, op2;
14193
14194 switch (TREE_CODE_LENGTH (code))
14195 {
14196 case 1:
14197 op0 = TREE_OPERAND (t, 0);
14198 tem = fold_unary_loc (loc, code, type, op0);
14199 return tem ? tem : expr;
14200 case 2:
14201 op0 = TREE_OPERAND (t, 0);
14202 op1 = TREE_OPERAND (t, 1);
14203 tem = fold_binary_loc (loc, code, type, op0, op1);
14204 return tem ? tem : expr;
14205 case 3:
14206 op0 = TREE_OPERAND (t, 0);
14207 op1 = TREE_OPERAND (t, 1);
14208 op2 = TREE_OPERAND (t, 2);
14209 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14210 return tem ? tem : expr;
14211 default:
14212 break;
14213 }
14214 }
14215
14216 switch (code)
14217 {
14218 case ARRAY_REF:
14219 {
14220 tree op0 = TREE_OPERAND (t, 0);
14221 tree op1 = TREE_OPERAND (t, 1);
14222
14223 if (TREE_CODE (op1) == INTEGER_CST
14224 && TREE_CODE (op0) == CONSTRUCTOR
14225 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14226 {
14227 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14228 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14229 unsigned HOST_WIDE_INT begin = 0;
14230
14231 /* Find a matching index by means of a binary search. */
14232 while (begin != end)
14233 {
14234 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14235 tree index = VEC_index (constructor_elt, elts, middle)->index;
14236
14237 if (TREE_CODE (index) == INTEGER_CST
14238 && tree_int_cst_lt (index, op1))
14239 begin = middle + 1;
14240 else if (TREE_CODE (index) == INTEGER_CST
14241 && tree_int_cst_lt (op1, index))
14242 end = middle;
14243 else if (TREE_CODE (index) == RANGE_EXPR
14244 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14245 begin = middle + 1;
14246 else if (TREE_CODE (index) == RANGE_EXPR
14247 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14248 end = middle;
14249 else
14250 return VEC_index (constructor_elt, elts, middle)->value;
14251 }
14252 }
14253
14254 return t;
14255 }
14256
14257 case CONST_DECL:
14258 return fold (DECL_INITIAL (t));
14259
14260 default:
14261 return t;
14262 } /* switch (code) */
14263 }
14264
14265 #ifdef ENABLE_FOLD_CHECKING
14266 #undef fold
14267
14268 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14269 static void fold_check_failed (const_tree, const_tree);
14270 void print_fold_checksum (const_tree);
14271
14272 /* When --enable-checking=fold, compute a digest of EXPR before
14273 and after the actual fold call to verify that fold did not
14274 accidentally change the original expr. */
14275
14276 tree
14277 fold (tree expr)
14278 {
14279 tree ret;
14280 struct md5_ctx ctx;
14281 unsigned char checksum_before[16], checksum_after[16];
14282 htab_t ht;
14283
14284 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14285 md5_init_ctx (&ctx);
14286 fold_checksum_tree (expr, &ctx, ht);
14287 md5_finish_ctx (&ctx, checksum_before);
14288 htab_empty (ht);
14289
14290 ret = fold_1 (expr);
14291
14292 md5_init_ctx (&ctx);
14293 fold_checksum_tree (expr, &ctx, ht);
14294 md5_finish_ctx (&ctx, checksum_after);
14295 htab_delete (ht);
14296
14297 if (memcmp (checksum_before, checksum_after, 16))
14298 fold_check_failed (expr, ret);
14299
14300 return ret;
14301 }
14302
14303 void
14304 print_fold_checksum (const_tree expr)
14305 {
14306 struct md5_ctx ctx;
14307 unsigned char checksum[16], cnt;
14308 htab_t ht;
14309
14310 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14311 md5_init_ctx (&ctx);
14312 fold_checksum_tree (expr, &ctx, ht);
14313 md5_finish_ctx (&ctx, checksum);
14314 htab_delete (ht);
14315 for (cnt = 0; cnt < 16; ++cnt)
14316 fprintf (stderr, "%02x", checksum[cnt]);
14317 putc ('\n', stderr);
14318 }
14319
14320 static void
14321 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14322 {
14323 internal_error ("fold check: original tree changed by fold");
14324 }
14325
14326 static void
14327 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14328 {
14329 void **slot;
14330 enum tree_code code;
14331 union tree_node buf;
14332 int i, len;
14333
14334 recursive_label:
14335 if (expr == NULL)
14336 return;
14337 slot = (void **) htab_find_slot (ht, expr, INSERT);
14338 if (*slot != NULL)
14339 return;
14340 *slot = CONST_CAST_TREE (expr);
14341 code = TREE_CODE (expr);
14342 if (TREE_CODE_CLASS (code) == tcc_declaration
14343 && DECL_ASSEMBLER_NAME_SET_P (expr))
14344 {
14345 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14346 memcpy ((char *) &buf, expr, tree_size (expr));
14347 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14348 expr = (tree) &buf;
14349 }
14350 else if (TREE_CODE_CLASS (code) == tcc_type
14351 && (TYPE_POINTER_TO (expr)
14352 || TYPE_REFERENCE_TO (expr)
14353 || TYPE_CACHED_VALUES_P (expr)
14354 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14355 || TYPE_NEXT_VARIANT (expr)))
14356 {
14357 /* Allow these fields to be modified. */
14358 tree tmp;
14359 memcpy ((char *) &buf, expr, tree_size (expr));
14360 expr = tmp = (tree) &buf;
14361 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14362 TYPE_POINTER_TO (tmp) = NULL;
14363 TYPE_REFERENCE_TO (tmp) = NULL;
14364 TYPE_NEXT_VARIANT (tmp) = NULL;
14365 if (TYPE_CACHED_VALUES_P (tmp))
14366 {
14367 TYPE_CACHED_VALUES_P (tmp) = 0;
14368 TYPE_CACHED_VALUES (tmp) = NULL;
14369 }
14370 }
14371 md5_process_bytes (expr, tree_size (expr), ctx);
14372 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14373 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14374 if (TREE_CODE_CLASS (code) != tcc_type
14375 && TREE_CODE_CLASS (code) != tcc_declaration
14376 && code != TREE_LIST
14377 && code != SSA_NAME
14378 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14379 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14380 switch (TREE_CODE_CLASS (code))
14381 {
14382 case tcc_constant:
14383 switch (code)
14384 {
14385 case STRING_CST:
14386 md5_process_bytes (TREE_STRING_POINTER (expr),
14387 TREE_STRING_LENGTH (expr), ctx);
14388 break;
14389 case COMPLEX_CST:
14390 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14391 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14392 break;
14393 case VECTOR_CST:
14394 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14395 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14396 break;
14397 default:
14398 break;
14399 }
14400 break;
14401 case tcc_exceptional:
14402 switch (code)
14403 {
14404 case TREE_LIST:
14405 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14406 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14407 expr = TREE_CHAIN (expr);
14408 goto recursive_label;
14409 break;
14410 case TREE_VEC:
14411 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14412 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14413 break;
14414 default:
14415 break;
14416 }
14417 break;
14418 case tcc_expression:
14419 case tcc_reference:
14420 case tcc_comparison:
14421 case tcc_unary:
14422 case tcc_binary:
14423 case tcc_statement:
14424 case tcc_vl_exp:
14425 len = TREE_OPERAND_LENGTH (expr);
14426 for (i = 0; i < len; ++i)
14427 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14428 break;
14429 case tcc_declaration:
14430 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14431 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14432 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14433 {
14434 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14435 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14436 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14437 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14438 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14439 }
14440 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14441 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14442
14443 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14444 {
14445 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14446 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14447 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14448 }
14449 break;
14450 case tcc_type:
14451 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14452 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14453 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14454 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14455 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14456 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14457 if (INTEGRAL_TYPE_P (expr)
14458 || SCALAR_FLOAT_TYPE_P (expr))
14459 {
14460 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14461 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14462 }
14463 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14464 if (TREE_CODE (expr) == RECORD_TYPE
14465 || TREE_CODE (expr) == UNION_TYPE
14466 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14467 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14468 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14469 break;
14470 default:
14471 break;
14472 }
14473 }
14474
14475 /* Helper function for outputting the checksum of a tree T. When
14476 debugging with gdb, you can "define mynext" to be "next" followed
14477 by "call debug_fold_checksum (op0)", then just trace down till the
14478 outputs differ. */
14479
14480 DEBUG_FUNCTION void
14481 debug_fold_checksum (const_tree t)
14482 {
14483 int i;
14484 unsigned char checksum[16];
14485 struct md5_ctx ctx;
14486 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14487
14488 md5_init_ctx (&ctx);
14489 fold_checksum_tree (t, &ctx, ht);
14490 md5_finish_ctx (&ctx, checksum);
14491 htab_empty (ht);
14492
14493 for (i = 0; i < 16; i++)
14494 fprintf (stderr, "%d ", checksum[i]);
14495
14496 fprintf (stderr, "\n");
14497 }
14498
14499 #endif
14500
14501 /* Fold a unary tree expression with code CODE of type TYPE with an
14502 operand OP0. LOC is the location of the resulting expression.
14503 Return a folded expression if successful. Otherwise, return a tree
14504 expression with code CODE of type TYPE with an operand OP0. */
14505
14506 tree
14507 fold_build1_stat_loc (location_t loc,
14508 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14509 {
14510 tree tem;
14511 #ifdef ENABLE_FOLD_CHECKING
14512 unsigned char checksum_before[16], checksum_after[16];
14513 struct md5_ctx ctx;
14514 htab_t ht;
14515
14516 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14517 md5_init_ctx (&ctx);
14518 fold_checksum_tree (op0, &ctx, ht);
14519 md5_finish_ctx (&ctx, checksum_before);
14520 htab_empty (ht);
14521 #endif
14522
14523 tem = fold_unary_loc (loc, code, type, op0);
14524 if (!tem)
14525 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14526
14527 #ifdef ENABLE_FOLD_CHECKING
14528 md5_init_ctx (&ctx);
14529 fold_checksum_tree (op0, &ctx, ht);
14530 md5_finish_ctx (&ctx, checksum_after);
14531 htab_delete (ht);
14532
14533 if (memcmp (checksum_before, checksum_after, 16))
14534 fold_check_failed (op0, tem);
14535 #endif
14536 return tem;
14537 }
14538
14539 /* Fold a binary tree expression with code CODE of type TYPE with
14540 operands OP0 and OP1. LOC is the location of the resulting
14541 expression. Return a folded expression if successful. Otherwise,
14542 return a tree expression with code CODE of type TYPE with operands
14543 OP0 and OP1. */
14544
14545 tree
14546 fold_build2_stat_loc (location_t loc,
14547 enum tree_code code, tree type, tree op0, tree op1
14548 MEM_STAT_DECL)
14549 {
14550 tree tem;
14551 #ifdef ENABLE_FOLD_CHECKING
14552 unsigned char checksum_before_op0[16],
14553 checksum_before_op1[16],
14554 checksum_after_op0[16],
14555 checksum_after_op1[16];
14556 struct md5_ctx ctx;
14557 htab_t ht;
14558
14559 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14560 md5_init_ctx (&ctx);
14561 fold_checksum_tree (op0, &ctx, ht);
14562 md5_finish_ctx (&ctx, checksum_before_op0);
14563 htab_empty (ht);
14564
14565 md5_init_ctx (&ctx);
14566 fold_checksum_tree (op1, &ctx, ht);
14567 md5_finish_ctx (&ctx, checksum_before_op1);
14568 htab_empty (ht);
14569 #endif
14570
14571 tem = fold_binary_loc (loc, code, type, op0, op1);
14572 if (!tem)
14573 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14574
14575 #ifdef ENABLE_FOLD_CHECKING
14576 md5_init_ctx (&ctx);
14577 fold_checksum_tree (op0, &ctx, ht);
14578 md5_finish_ctx (&ctx, checksum_after_op0);
14579 htab_empty (ht);
14580
14581 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14582 fold_check_failed (op0, tem);
14583
14584 md5_init_ctx (&ctx);
14585 fold_checksum_tree (op1, &ctx, ht);
14586 md5_finish_ctx (&ctx, checksum_after_op1);
14587 htab_delete (ht);
14588
14589 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14590 fold_check_failed (op1, tem);
14591 #endif
14592 return tem;
14593 }
14594
14595 /* Fold a ternary tree expression with code CODE of type TYPE with
14596 operands OP0, OP1, and OP2. Return a folded expression if
14597 successful. Otherwise, return a tree expression with code CODE of
14598 type TYPE with operands OP0, OP1, and OP2. */
14599
14600 tree
14601 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14602 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14603 {
14604 tree tem;
14605 #ifdef ENABLE_FOLD_CHECKING
14606 unsigned char checksum_before_op0[16],
14607 checksum_before_op1[16],
14608 checksum_before_op2[16],
14609 checksum_after_op0[16],
14610 checksum_after_op1[16],
14611 checksum_after_op2[16];
14612 struct md5_ctx ctx;
14613 htab_t ht;
14614
14615 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14616 md5_init_ctx (&ctx);
14617 fold_checksum_tree (op0, &ctx, ht);
14618 md5_finish_ctx (&ctx, checksum_before_op0);
14619 htab_empty (ht);
14620
14621 md5_init_ctx (&ctx);
14622 fold_checksum_tree (op1, &ctx, ht);
14623 md5_finish_ctx (&ctx, checksum_before_op1);
14624 htab_empty (ht);
14625
14626 md5_init_ctx (&ctx);
14627 fold_checksum_tree (op2, &ctx, ht);
14628 md5_finish_ctx (&ctx, checksum_before_op2);
14629 htab_empty (ht);
14630 #endif
14631
14632 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14633 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14634 if (!tem)
14635 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14636
14637 #ifdef ENABLE_FOLD_CHECKING
14638 md5_init_ctx (&ctx);
14639 fold_checksum_tree (op0, &ctx, ht);
14640 md5_finish_ctx (&ctx, checksum_after_op0);
14641 htab_empty (ht);
14642
14643 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14644 fold_check_failed (op0, tem);
14645
14646 md5_init_ctx (&ctx);
14647 fold_checksum_tree (op1, &ctx, ht);
14648 md5_finish_ctx (&ctx, checksum_after_op1);
14649 htab_empty (ht);
14650
14651 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14652 fold_check_failed (op1, tem);
14653
14654 md5_init_ctx (&ctx);
14655 fold_checksum_tree (op2, &ctx, ht);
14656 md5_finish_ctx (&ctx, checksum_after_op2);
14657 htab_delete (ht);
14658
14659 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14660 fold_check_failed (op2, tem);
14661 #endif
14662 return tem;
14663 }
14664
14665 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
14666 arguments in ARGARRAY, and a null static chain.
14667 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14668 of type TYPE from the given operands as constructed by build_call_array. */
14669
14670 tree
14671 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14672 int nargs, tree *argarray)
14673 {
14674 tree tem;
14675 #ifdef ENABLE_FOLD_CHECKING
14676 unsigned char checksum_before_fn[16],
14677 checksum_before_arglist[16],
14678 checksum_after_fn[16],
14679 checksum_after_arglist[16];
14680 struct md5_ctx ctx;
14681 htab_t ht;
14682 int i;
14683
14684 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14685 md5_init_ctx (&ctx);
14686 fold_checksum_tree (fn, &ctx, ht);
14687 md5_finish_ctx (&ctx, checksum_before_fn);
14688 htab_empty (ht);
14689
14690 md5_init_ctx (&ctx);
14691 for (i = 0; i < nargs; i++)
14692 fold_checksum_tree (argarray[i], &ctx, ht);
14693 md5_finish_ctx (&ctx, checksum_before_arglist);
14694 htab_empty (ht);
14695 #endif
14696
14697 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14698
14699 #ifdef ENABLE_FOLD_CHECKING
14700 md5_init_ctx (&ctx);
14701 fold_checksum_tree (fn, &ctx, ht);
14702 md5_finish_ctx (&ctx, checksum_after_fn);
14703 htab_empty (ht);
14704
14705 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14706 fold_check_failed (fn, tem);
14707
14708 md5_init_ctx (&ctx);
14709 for (i = 0; i < nargs; i++)
14710 fold_checksum_tree (argarray[i], &ctx, ht);
14711 md5_finish_ctx (&ctx, checksum_after_arglist);
14712 htab_delete (ht);
14713
14714 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14715 fold_check_failed (NULL_TREE, tem);
14716 #endif
14717 return tem;
14718 }
14719
14720 /* Perform constant folding and related simplification of initializer
14721 expressions. These functions behave identically to "fold_buildN" but
14722 ignore potential run-time traps and exceptions that fold must preserve. */
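/* E.g. within a static initializer, 1.0 / 3.0 may be folded to a
   constant even under -frounding-math, because the macros below
   temporarily clear flag_rounding_math and the other trap-related
   flags.  */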
14723
14724 #define START_FOLD_INIT \
14725 int saved_signaling_nans = flag_signaling_nans;\
14726 int saved_trapping_math = flag_trapping_math;\
14727 int saved_rounding_math = flag_rounding_math;\
14728 int saved_trapv = flag_trapv;\
14729 int saved_folding_initializer = folding_initializer;\
14730 flag_signaling_nans = 0;\
14731 flag_trapping_math = 0;\
14732 flag_rounding_math = 0;\
14733 flag_trapv = 0;\
14734 folding_initializer = 1;
14735
14736 #define END_FOLD_INIT \
14737 flag_signaling_nans = saved_signaling_nans;\
14738 flag_trapping_math = saved_trapping_math;\
14739 flag_rounding_math = saved_rounding_math;\
14740 flag_trapv = saved_trapv;\
14741 folding_initializer = saved_folding_initializer;
14742
14743 tree
14744 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14745 tree type, tree op)
14746 {
14747 tree result;
14748 START_FOLD_INIT;
14749
14750 result = fold_build1_loc (loc, code, type, op);
14751
14752 END_FOLD_INIT;
14753 return result;
14754 }
14755
14756 tree
14757 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14758 tree type, tree op0, tree op1)
14759 {
14760 tree result;
14761 START_FOLD_INIT;
14762
14763 result = fold_build2_loc (loc, code, type, op0, op1);
14764
14765 END_FOLD_INIT;
14766 return result;
14767 }
14768
14769 tree
14770 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14771 tree type, tree op0, tree op1, tree op2)
14772 {
14773 tree result;
14774 START_FOLD_INIT;
14775
14776 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14777
14778 END_FOLD_INIT;
14779 return result;
14780 }
14781
14782 tree
14783 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14784 int nargs, tree *argarray)
14785 {
14786 tree result;
14787 START_FOLD_INIT;
14788
14789 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14790
14791 END_FOLD_INIT;
14792 return result;
14793 }
14794
14795 #undef START_FOLD_INIT
14796 #undef END_FOLD_INIT
14797
14798 /* Determine if the first argument is a multiple of the second argument.
14799 Return 0 if it is not, or if we cannot easily determine it to be.
14800
14801 An example of the sort of thing we care about (at this point; this routine
14802 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14803 fold cases do now) is discovering that
14804
14805 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14806
14807 is a multiple of
14808
14809 SAVE_EXPR (J * 8)
14810
14811 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14812
14813 This code also handles discovering that
14814
14815 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14816
14817 is a multiple of 8 so we don't have to worry about dealing with a
14818 possible remainder.
14819
14820 Note that we *look* inside a SAVE_EXPR only to determine how it was
14821 calculated; it is not safe for fold to do much of anything else with the
14822 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14823 at run time. For example, the latter example above *cannot* be implemented
14824 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14825 evaluation time of the original SAVE_EXPR is not necessarily the same at
14826 the time the new expression is evaluated. The only optimization of this
14827 sort that would be valid is changing
14828
14829 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14830
14831 divided by 8 to
14832
14833 SAVE_EXPR (I) * SAVE_EXPR (J)
14834
14835 (where the same SAVE_EXPR (J) is used in the original and the
14836 transformed version). */
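/* For instance, (J << 3) is recognized as a multiple of 8: the
   LSHIFT_EXPR case below rewrites the shift as the constant 1 << 3
   and recurses on it.  */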
14837
14838 int
14839 multiple_of_p (tree type, const_tree top, const_tree bottom)
14840 {
14841 if (operand_equal_p (top, bottom, 0))
14842 return 1;
14843
14844 if (TREE_CODE (type) != INTEGER_TYPE)
14845 return 0;
14846
14847 switch (TREE_CODE (top))
14848 {
14849 case BIT_AND_EXPR:
14850 /* Bitwise AND: if BOTTOM is a power of two and either operand is
14851 a multiple of BOTTOM, its low bits are zero, so TOP is a multiple too. */
14852 if (!integer_pow2p (bottom))
14853 return 0;
14854 /* FALLTHRU */
14855
14856 case MULT_EXPR:
14857 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14858 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14859
14860 case PLUS_EXPR:
14861 case MINUS_EXPR:
14862 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14863 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14864
14865 case LSHIFT_EXPR:
14866 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14867 {
14868 tree op1, t1;
14869
14870 op1 = TREE_OPERAND (top, 1);
14871 /* const_binop may not detect overflow correctly,
14872 so check for it explicitly here. */
14873 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14874 > TREE_INT_CST_LOW (op1)
14875 && TREE_INT_CST_HIGH (op1) == 0
14876 && 0 != (t1 = fold_convert (type,
14877 const_binop (LSHIFT_EXPR,
14878 size_one_node,
14879 op1)))
14880 && !TREE_OVERFLOW (t1))
14881 return multiple_of_p (type, t1, bottom);
14882 }
14883 return 0;
14884
14885 case NOP_EXPR:
14886 /* Can't handle conversions from non-integral or wider integral types. */
14887 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14888 || (TYPE_PRECISION (type)
14889 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14890 return 0;
14891
14892 /* ... fall through ... */
14893
14894 case SAVE_EXPR:
14895 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14896
14897 case COND_EXPR:
14898 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14899 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14900
14901 case INTEGER_CST:
14902 if (TREE_CODE (bottom) != INTEGER_CST
14903 || integer_zerop (bottom)
14904 || (TYPE_UNSIGNED (type)
14905 && (tree_int_cst_sgn (top) < 0
14906 || tree_int_cst_sgn (bottom) < 0)))
14907 return 0;
14908 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14909 top, bottom));
14910
14911 default:
14912 return 0;
14913 }
14914 }
14915
14916 /* Return true if an expression is known to be non-negative from its CODE or TYPE alone. */
14917
14918 static bool
14919 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14920 {
14921 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14922 && truth_value_p (code))
14923 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14924 have a signed:1 type (where the values are -1 and 0). */
14925 return true;
14926 return false;
14927 }
14928
14929 /* Return true if (CODE OP0) is known to be non-negative. If the return
14930 value is based on the assumption that signed overflow is undefined,
14931 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14932 *STRICT_OVERFLOW_P. */
14933
14934 bool
14935 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14936 bool *strict_overflow_p)
14937 {
14938 if (TYPE_UNSIGNED (type))
14939 return true;
14940
14941 switch (code)
14942 {
14943 case ABS_EXPR:
14944 /* We can't return true if flag_wrapv is set, because then
14945 ABS_EXPR<INT_MIN> == INT_MIN. */
14946 if (!INTEGRAL_TYPE_P (type))
14947 return true;
14948 if (TYPE_OVERFLOW_UNDEFINED (type))
14949 {
14950 *strict_overflow_p = true;
14951 return true;
14952 }
14953 break;
14954
14955 case NON_LVALUE_EXPR:
14956 case FLOAT_EXPR:
14957 case FIX_TRUNC_EXPR:
14958 return tree_expr_nonnegative_warnv_p (op0,
14959 strict_overflow_p);
14960
14961 case NOP_EXPR:
14962 {
14963 tree inner_type = TREE_TYPE (op0);
14964 tree outer_type = type;
14965
14966 if (TREE_CODE (outer_type) == REAL_TYPE)
14967 {
14968 if (TREE_CODE (inner_type) == REAL_TYPE)
14969 return tree_expr_nonnegative_warnv_p (op0,
14970 strict_overflow_p);
14971 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14972 {
14973 if (TYPE_UNSIGNED (inner_type))
14974 return true;
14975 return tree_expr_nonnegative_warnv_p (op0,
14976 strict_overflow_p);
14977 }
14978 }
14979 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14980 {
14981 if (TREE_CODE (inner_type) == REAL_TYPE)
14982 return tree_expr_nonnegative_warnv_p (op0,
14983 strict_overflow_p);
14984 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14985 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14986 && TYPE_UNSIGNED (inner_type);
14987 }
14988 }
14989 break;
14990
14991 default:
14992 return tree_simple_nonnegative_warnv_p (code, type);
14993 }
14994
14995 /* We don't know the sign of the expression, so be conservative and return false. */
14996 return false;
14997 }
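
/* A sketch of the NOP_EXPR rule above, with hypothetical operands:
   widening a 16-bit unsigned short to a 32-bit int is known
   nonnegative, because the inner type is unsigned and strictly
   narrower than the outer type; a signed-to-signed or narrowing
   integer conversion instead falls through and returns false.  */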
14998
14999 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15000 value is based on the assumption that signed overflow is undefined,
15001 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15002 *STRICT_OVERFLOW_P. */
15003
15004 bool
15005 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15006 tree op1, bool *strict_overflow_p)
15007 {
15008 if (TYPE_UNSIGNED (type))
15009 return true;
15010
15011 switch (code)
15012 {
15013 case POINTER_PLUS_EXPR:
15014 case PLUS_EXPR:
15015 if (FLOAT_TYPE_P (type))
15016 return (tree_expr_nonnegative_warnv_p (op0,
15017 strict_overflow_p)
15018 && tree_expr_nonnegative_warnv_p (op1,
15019 strict_overflow_p));
15020
15021 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15022 both unsigned and at least 2 bits shorter than the result. */
15023 if (TREE_CODE (type) == INTEGER_TYPE
15024 && TREE_CODE (op0) == NOP_EXPR
15025 && TREE_CODE (op1) == NOP_EXPR)
15026 {
15027 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15028 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15029 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15030 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15031 {
15032 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15033 TYPE_PRECISION (inner2)) + 1;
15034 return prec < TYPE_PRECISION (type);
15035 }
15036 }
15037 break;
15038
15039 case MULT_EXPR:
15040 if (FLOAT_TYPE_P (type))
15041 {
15042 /* x * x for floating point x is always non-negative. */
15043 if (operand_equal_p (op0, op1, 0))
15044 return true;
15045 return (tree_expr_nonnegative_warnv_p (op0,
15046 strict_overflow_p)
15047 && tree_expr_nonnegative_warnv_p (op1,
15048 strict_overflow_p));
15049 }
15050
15051 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15052 both unsigned and their combined precision is less than the result's. */
15053 if (TREE_CODE (type) == INTEGER_TYPE
15054 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15055 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15056 {
15057 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15058 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15059 : TREE_TYPE (op0);
15060 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15061 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15062 : TREE_TYPE (op1);
15063
15064 bool unsigned0 = TYPE_UNSIGNED (inner0);
15065 bool unsigned1 = TYPE_UNSIGNED (inner1);
15066
15067 if (TREE_CODE (op0) == INTEGER_CST)
15068 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15069
15070 if (TREE_CODE (op1) == INTEGER_CST)
15071 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15072
15073 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15074 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15075 {
15076 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15077 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15078 : TYPE_PRECISION (inner0);
15079
15080 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15081 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15082 : TYPE_PRECISION (inner1);
15083
15084 return precision0 + precision1 < TYPE_PRECISION (type);
15085 }
15086 }
15087 return false;
15088
15089 case BIT_AND_EXPR:
15090 case MAX_EXPR:
15091 return (tree_expr_nonnegative_warnv_p (op0,
15092 strict_overflow_p)
15093 || tree_expr_nonnegative_warnv_p (op1,
15094 strict_overflow_p));
15095
15096 case BIT_IOR_EXPR:
15097 case BIT_XOR_EXPR:
15098 case MIN_EXPR:
15099 case RDIV_EXPR:
15100 case TRUNC_DIV_EXPR:
15101 case CEIL_DIV_EXPR:
15102 case FLOOR_DIV_EXPR:
15103 case ROUND_DIV_EXPR:
15104 return (tree_expr_nonnegative_warnv_p (op0,
15105 strict_overflow_p)
15106 && tree_expr_nonnegative_warnv_p (op1,
15107 strict_overflow_p));
15108
15109 case TRUNC_MOD_EXPR:
15110 case CEIL_MOD_EXPR:
15111 case FLOOR_MOD_EXPR:
15112 case ROUND_MOD_EXPR:
15113 return tree_expr_nonnegative_warnv_p (op0,
15114 strict_overflow_p);
15115 default:
15116 return tree_simple_nonnegative_warnv_p (code, type);
15117 }
15118
15119 /* We don't know the sign of the expression, so be conservative and return false. */
15120 return false;
15121 }
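
/* A worked instance of the PLUS_EXPR rule above, assuming hypothetical
   16-bit unsigned shorts us1 and us2 widened into a 32-bit int:
     (int) us1 + (int) us2
   Here MAX (16, 16) + 1 == 17 < 32, so the sum fits without wrapping
   and is therefore nonnegative.  */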
15122
15123 /* Return true if T is known to be non-negative. If the return
15124 value is based on the assumption that signed overflow is undefined,
15125 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15126 *STRICT_OVERFLOW_P. */
15127
15128 bool
15129 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15130 {
15131 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15132 return true;
15133
15134 switch (TREE_CODE (t))
15135 {
15136 case INTEGER_CST:
15137 return tree_int_cst_sgn (t) >= 0;
15138
15139 case REAL_CST:
15140 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15141
15142 case FIXED_CST:
15143 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15144
15145 case COND_EXPR:
15146 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15147 strict_overflow_p)
15148 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15149 strict_overflow_p));
15150 default:
15151 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15152 TREE_TYPE (t));
15153 }
15154 /* We don't know the sign of `t', so be conservative and return false. */
15155 return false;
15156 }
15157
15158 /* Return true if a call to FNDECL with arguments ARG0 and ARG1,
15159 with result type TYPE, is known to be non-negative. If the return
15160 value is based on the assumption that signed overflow is undefined,
15161 set *STRICT_OVERFLOW_P to true; otherwise, don't change *STRICT_OVERFLOW_P. */
15162
15163 bool
15164 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15165 tree arg0, tree arg1, bool *strict_overflow_p)
15166 {
15167 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15168 switch (DECL_FUNCTION_CODE (fndecl))
15169 {
15170 CASE_FLT_FN (BUILT_IN_ACOS):
15171 CASE_FLT_FN (BUILT_IN_ACOSH):
15172 CASE_FLT_FN (BUILT_IN_CABS):
15173 CASE_FLT_FN (BUILT_IN_COSH):
15174 CASE_FLT_FN (BUILT_IN_ERFC):
15175 CASE_FLT_FN (BUILT_IN_EXP):
15176 CASE_FLT_FN (BUILT_IN_EXP10):
15177 CASE_FLT_FN (BUILT_IN_EXP2):
15178 CASE_FLT_FN (BUILT_IN_FABS):
15179 CASE_FLT_FN (BUILT_IN_FDIM):
15180 CASE_FLT_FN (BUILT_IN_HYPOT):
15181 CASE_FLT_FN (BUILT_IN_POW10):
15182 CASE_INT_FN (BUILT_IN_FFS):
15183 CASE_INT_FN (BUILT_IN_PARITY):
15184 CASE_INT_FN (BUILT_IN_POPCOUNT):
15185 case BUILT_IN_BSWAP32:
15186 case BUILT_IN_BSWAP64:
15187 /* Always true. */
15188 return true;
15189
15190 CASE_FLT_FN (BUILT_IN_SQRT):
15191 /* sqrt(-0.0) is -0.0. */
15192 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15193 return true;
15194 return tree_expr_nonnegative_warnv_p (arg0,
15195 strict_overflow_p);
15196
15197 CASE_FLT_FN (BUILT_IN_ASINH):
15198 CASE_FLT_FN (BUILT_IN_ATAN):
15199 CASE_FLT_FN (BUILT_IN_ATANH):
15200 CASE_FLT_FN (BUILT_IN_CBRT):
15201 CASE_FLT_FN (BUILT_IN_CEIL):
15202 CASE_FLT_FN (BUILT_IN_ERF):
15203 CASE_FLT_FN (BUILT_IN_EXPM1):
15204 CASE_FLT_FN (BUILT_IN_FLOOR):
15205 CASE_FLT_FN (BUILT_IN_FMOD):
15206 CASE_FLT_FN (BUILT_IN_FREXP):
15207 CASE_FLT_FN (BUILT_IN_ICEIL):
15208 CASE_FLT_FN (BUILT_IN_IFLOOR):
15209 CASE_FLT_FN (BUILT_IN_IRINT):
15210 CASE_FLT_FN (BUILT_IN_IROUND):
15211 CASE_FLT_FN (BUILT_IN_LCEIL):
15212 CASE_FLT_FN (BUILT_IN_LDEXP):
15213 CASE_FLT_FN (BUILT_IN_LFLOOR):
15214 CASE_FLT_FN (BUILT_IN_LLCEIL):
15215 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15216 CASE_FLT_FN (BUILT_IN_LLRINT):
15217 CASE_FLT_FN (BUILT_IN_LLROUND):
15218 CASE_FLT_FN (BUILT_IN_LRINT):
15219 CASE_FLT_FN (BUILT_IN_LROUND):
15220 CASE_FLT_FN (BUILT_IN_MODF):
15221 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15222 CASE_FLT_FN (BUILT_IN_RINT):
15223 CASE_FLT_FN (BUILT_IN_ROUND):
15224 CASE_FLT_FN (BUILT_IN_SCALB):
15225 CASE_FLT_FN (BUILT_IN_SCALBLN):
15226 CASE_FLT_FN (BUILT_IN_SCALBN):
15227 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15228 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15229 CASE_FLT_FN (BUILT_IN_SINH):
15230 CASE_FLT_FN (BUILT_IN_TANH):
15231 CASE_FLT_FN (BUILT_IN_TRUNC):
15232 /* True if the 1st argument is nonnegative. */
15233 return tree_expr_nonnegative_warnv_p (arg0,
15234 strict_overflow_p);
15235
15236 CASE_FLT_FN (BUILT_IN_FMAX):
15237 /* True if either the 1st OR the 2nd argument is nonnegative. */
15238 return (tree_expr_nonnegative_warnv_p (arg0,
15239 strict_overflow_p)
15240 || (tree_expr_nonnegative_warnv_p (arg1,
15241 strict_overflow_p)));
15242
15243 CASE_FLT_FN (BUILT_IN_FMIN):
15244 /* True if both the 1st AND the 2nd arguments are nonnegative. */
15245 return (tree_expr_nonnegative_warnv_p (arg0,
15246 strict_overflow_p)
15247 && (tree_expr_nonnegative_warnv_p (arg1,
15248 strict_overflow_p)));
15249
15250 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15251 /* True if the 2nd argument is nonnegative. */
15252 return tree_expr_nonnegative_warnv_p (arg1,
15253 strict_overflow_p);
15254
15255 CASE_FLT_FN (BUILT_IN_POWI):
15256 /* True if the 1st argument is nonnegative or the second
15257 argument is an even integer. */
15258 if (TREE_CODE (arg1) == INTEGER_CST
15259 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15260 return true;
15261 return tree_expr_nonnegative_warnv_p (arg0,
15262 strict_overflow_p);
15263
15264 CASE_FLT_FN (BUILT_IN_POW):
15265 /* True if the 1st argument is nonnegative or the second
15266 argument is an even integer valued real. */
15267 if (TREE_CODE (arg1) == REAL_CST)
15268 {
15269 REAL_VALUE_TYPE c;
15270 HOST_WIDE_INT n;
15271
15272 c = TREE_REAL_CST (arg1);
15273 n = real_to_integer (&c);
15274 if ((n & 1) == 0)
15275 {
15276 REAL_VALUE_TYPE cint;
15277 real_from_integer (&cint, VOIDmode, n,
15278 n < 0 ? -1 : 0, 0);
15279 if (real_identical (&c, &cint))
15280 return true;
15281 }
15282 }
15283 return tree_expr_nonnegative_warnv_p (arg0,
15284 strict_overflow_p);
15285
15286 default:
15287 break;
15288 }
15289 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15290 type);
15291 }
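
/* For example, the POW handling above treats a call such as
     pow (x, 2.0)
   as nonnegative for arbitrary x, because 2.0 is an even
   integer-valued real; pow (x, 3.0) is nonnegative only when x is.  */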
15292
15293 /* Return true if T is known to be non-negative. If the return
15294 value is based on the assumption that signed overflow is undefined,
15295 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15296 *STRICT_OVERFLOW_P. */
15297
15298 bool
15299 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15300 {
15301 enum tree_code code = TREE_CODE (t);
15302 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15303 return true;
15304
15305 switch (code)
15306 {
15307 case TARGET_EXPR:
15308 {
15309 tree temp = TARGET_EXPR_SLOT (t);
15310 t = TARGET_EXPR_INITIAL (t);
15311
15312 /* If the initializer is non-void, then it's a normal expression
15313 that will be assigned to the slot. */
15314 if (!VOID_TYPE_P (t))
15315 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15316
15317 /* Otherwise, the initializer sets the slot in some way. One common
15318 way is an assignment statement at the end of the initializer. */
15319 while (1)
15320 {
15321 if (TREE_CODE (t) == BIND_EXPR)
15322 t = expr_last (BIND_EXPR_BODY (t));
15323 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15324 || TREE_CODE (t) == TRY_CATCH_EXPR)
15325 t = expr_last (TREE_OPERAND (t, 0));
15326 else if (TREE_CODE (t) == STATEMENT_LIST)
15327 t = expr_last (t);
15328 else
15329 break;
15330 }
15331 if (TREE_CODE (t) == MODIFY_EXPR
15332 && TREE_OPERAND (t, 0) == temp)
15333 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15334 strict_overflow_p);
15335
15336 return false;
15337 }
15338
15339 case CALL_EXPR:
15340 {
15341 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15342 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15343
15344 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15345 get_callee_fndecl (t),
15346 arg0,
15347 arg1,
15348 strict_overflow_p);
15349 }
15350 case COMPOUND_EXPR:
15351 case MODIFY_EXPR:
15352 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15353 strict_overflow_p);
15354 case BIND_EXPR:
15355 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15356 strict_overflow_p);
15357 case SAVE_EXPR:
15358 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15359 strict_overflow_p);
15360
15361 default:
15362 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15363 TREE_TYPE (t));
15364 }
15365
15366 /* We don't know the sign of `t', so be conservative and return false. */
15367 return false;
15368 }
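
/* As a sketch of the TARGET_EXPR walk above, an initializer whose body
   ends in an assignment to the slot, e.g. (hypothetical, in dump form)
     TARGET_EXPR <D.1234, { ...; D.1234 = ABS_EXPR <x>; }>
   is as nonnegative as the right-hand side of that final MODIFY_EXPR.  */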
15369
15370 /* Return true if T is known to be non-negative. If the return
15371 value is based on the assumption that signed overflow is undefined,
15372 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15373 *STRICT_OVERFLOW_P. */
15374
15375 bool
15376 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15377 {
15378 enum tree_code code;
15379 if (t == error_mark_node)
15380 return false;
15381
15382 code = TREE_CODE (t);
15383 switch (TREE_CODE_CLASS (code))
15384 {
15385 case tcc_binary:
15386 case tcc_comparison:
15387 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15388 TREE_TYPE (t),
15389 TREE_OPERAND (t, 0),
15390 TREE_OPERAND (t, 1),
15391 strict_overflow_p);
15392
15393 case tcc_unary:
15394 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15395 TREE_TYPE (t),
15396 TREE_OPERAND (t, 0),
15397 strict_overflow_p);
15398
15399 case tcc_constant:
15400 case tcc_declaration:
15401 case tcc_reference:
15402 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15403
15404 default:
15405 break;
15406 }
15407
15408 switch (code)
15409 {
15410 case TRUTH_AND_EXPR:
15411 case TRUTH_OR_EXPR:
15412 case TRUTH_XOR_EXPR:
15413 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15414 TREE_TYPE (t),
15415 TREE_OPERAND (t, 0),
15416 TREE_OPERAND (t, 1),
15417 strict_overflow_p);
15418 case TRUTH_NOT_EXPR:
15419 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15420 TREE_TYPE (t),
15421 TREE_OPERAND (t, 0),
15422 strict_overflow_p);
15423
15424 case COND_EXPR:
15425 case CONSTRUCTOR:
15426 case OBJ_TYPE_REF:
15427 case ASSERT_EXPR:
15428 case ADDR_EXPR:
15429 case WITH_SIZE_EXPR:
15430 case SSA_NAME:
15431 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15432
15433 default:
15434 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15435 }
15436 }
15437
15438 /* Return true if `t' is known to be non-negative. Handle warnings
15439 about undefined signed overflow. */
15440
15441 bool
15442 tree_expr_nonnegative_p (tree t)
15443 {
15444 bool ret, strict_overflow_p;
15445
15446 strict_overflow_p = false;
15447 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15448 if (strict_overflow_p)
15449 fold_overflow_warning (("assuming signed overflow does not occur when "
15450 "determining that expression is always "
15451 "non-negative"),
15452 WARN_STRICT_OVERFLOW_MISC);
15453 return ret;
15454 }
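
/* A minimal hypothetical caller, in the style used elsewhere in the
   middle end -- fold ABS_EXPR <arg0> to its operand when the operand
   is provably nonnegative:

     if (tree_expr_nonnegative_p (arg0))
       return fold_convert_loc (loc, type, arg0);

   Any -Wstrict-overflow diagnostic implied by the proof is emitted
   here rather than at each caller.  */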
15455
15456
15457 /* Return true when (CODE OP0) is known to be nonzero. For floating
15458 point we further ensure that the value is not denormal.
15459 Similar logic is present in nonzero_address in rtlanal.c.
15460
15461 If the return value is based on the assumption that signed overflow
15462 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15463 change *STRICT_OVERFLOW_P. */
15464
15465 bool
15466 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15467 bool *strict_overflow_p)
15468 {
15469 switch (code)
15470 {
15471 case ABS_EXPR:
15472 return tree_expr_nonzero_warnv_p (op0,
15473 strict_overflow_p);
15474
15475 case NOP_EXPR:
15476 {
15477 tree inner_type = TREE_TYPE (op0);
15478 tree outer_type = type;
15479
15480 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15481 && tree_expr_nonzero_warnv_p (op0,
15482 strict_overflow_p));
15483 }
15484 break;
15485
15486 case NON_LVALUE_EXPR:
15487 return tree_expr_nonzero_warnv_p (op0,
15488 strict_overflow_p);
15489
15490 default:
15491 break;
15492 }
15493
15494 return false;
15495 }
15496
15497 /* Return true when (CODE OP0 OP1) is known to be nonzero. For
15498 floating point we further ensure that the value is not denormal.
15499 Similar logic is present in nonzero_address in rtlanal.c.
15500
15501 If the return value is based on the assumption that signed overflow
15502 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15503 change *STRICT_OVERFLOW_P. */
15504
15505 bool
15506 tree_binary_nonzero_warnv_p (enum tree_code code,
15507 tree type,
15508 tree op0,
15509 tree op1, bool *strict_overflow_p)
15510 {
15511 bool sub_strict_overflow_p;
15512 switch (code)
15513 {
15514 case POINTER_PLUS_EXPR:
15515 case PLUS_EXPR:
15516 if (TYPE_OVERFLOW_UNDEFINED (type))
15517 {
15518 /* In the presence of negative values it is hard
15519 to say anything. */
15520 sub_strict_overflow_p = false;
15521 if (!tree_expr_nonnegative_warnv_p (op0,
15522 &sub_strict_overflow_p)
15523 || !tree_expr_nonnegative_warnv_p (op1,
15524 &sub_strict_overflow_p))
15525 return false;
15526 /* One of the operands must be positive and the other non-negative. */
15527 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15528 overflows, on a twos-complement machine the sum of two
15529 nonnegative numbers can never be zero. */
15530 return (tree_expr_nonzero_warnv_p (op0,
15531 strict_overflow_p)
15532 || tree_expr_nonzero_warnv_p (op1,
15533 strict_overflow_p));
15534 }
15535 break;
15536
15537 case MULT_EXPR:
15538 if (TYPE_OVERFLOW_UNDEFINED (type))
15539 {
15540 if (tree_expr_nonzero_warnv_p (op0,
15541 strict_overflow_p)
15542 && tree_expr_nonzero_warnv_p (op1,
15543 strict_overflow_p))
15544 {
15545 *strict_overflow_p = true;
15546 return true;
15547 }
15548 }
15549 break;
15550
15551 case MIN_EXPR:
15552 sub_strict_overflow_p = false;
15553 if (tree_expr_nonzero_warnv_p (op0,
15554 &sub_strict_overflow_p)
15555 && tree_expr_nonzero_warnv_p (op1,
15556 &sub_strict_overflow_p))
15557 {
15558 if (sub_strict_overflow_p)
15559 *strict_overflow_p = true;
/* MIN_EXPR is always equal to one of its operands, so if both
   are nonzero the minimum is nonzero as well.  */
return true;
15560 }
15561 break;
15562
15563 case MAX_EXPR:
15564 sub_strict_overflow_p = false;
15565 if (tree_expr_nonzero_warnv_p (op0,
15566 &sub_strict_overflow_p))
15567 {
15568 if (sub_strict_overflow_p)
15569 *strict_overflow_p = true;
15570
15571 /* When both operands are nonzero, then MAX must be too. */
15572 if (tree_expr_nonzero_warnv_p (op1,
15573 strict_overflow_p))
15574 return true;
15575
15576 /* MAX where operand 0 is positive is positive. */
15577 return tree_expr_nonnegative_warnv_p (op0,
15578 strict_overflow_p);
15579 }
15580 /* MAX where operand 1 is positive is positive. */
15581 else if (tree_expr_nonzero_warnv_p (op1,
15582 &sub_strict_overflow_p)
15583 && tree_expr_nonnegative_warnv_p (op1,
15584 &sub_strict_overflow_p))
15585 {
15586 if (sub_strict_overflow_p)
15587 *strict_overflow_p = true;
15588 return true;
15589 }
15590 break;
15591
15592 case BIT_IOR_EXPR:
15593 return (tree_expr_nonzero_warnv_p (op1,
15594 strict_overflow_p)
15595 || tree_expr_nonzero_warnv_p (op0,
15596 strict_overflow_p));
15597
15598 default:
15599 break;
15600 }
15601
15602 return false;
15603 }
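
/* For instance, the MULT_EXPR case above proves a signed product
     x * y
   nonzero when both factors are known nonzero, and records through
   *STRICT_OVERFLOW_P that the proof relies on signed overflow being
   undefined: with -fwrapv, 65536 * 65536 wraps to zero in 32 bits,
   so no such claim would be valid.  */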
15604
15605 /* Return true when T (such as an address) is known to be nonzero.
15606 For floating point we further ensure that T is not denormal.
15607 Similar logic is present in nonzero_address in rtlanal.c.
15608
15609 If the return value is based on the assumption that signed overflow
15610 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15611 change *STRICT_OVERFLOW_P. */
15612
15613 bool
15614 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15615 {
15616 bool sub_strict_overflow_p;
15617 switch (TREE_CODE (t))
15618 {
15619 case INTEGER_CST:
15620 return !integer_zerop (t);
15621
15622 case ADDR_EXPR:
15623 {
15624 tree base = TREE_OPERAND (t, 0);
15625 if (!DECL_P (base))
15626 base = get_base_address (base);
15627
15628 if (!base)
15629 return false;
15630
15631 /* Weak declarations may link to NULL. Other things may also be NULL,
15632 so protect with -fdelete-null-pointer-checks; variables allocated
15633 on the stack, however, can never be NULL. */
15634 if (DECL_P (base)
15635 && (flag_delete_null_pointer_checks
15636 || (DECL_CONTEXT (base)
15637 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15638 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15639 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15640
15641 /* Constants are never weak. */
15642 if (CONSTANT_CLASS_P (base))
15643 return true;
15644
15645 return false;
15646 }
15647
15648 case COND_EXPR:
15649 sub_strict_overflow_p = false;
15650 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15651 &sub_strict_overflow_p)
15652 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15653 &sub_strict_overflow_p))
15654 {
15655 if (sub_strict_overflow_p)
15656 *strict_overflow_p = true;
15657 return true;
15658 }
15659 break;
15660
15661 default:
15662 break;
15663 }
15664 return false;
15665 }
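
/* Two contrasting hypothetical ADDR_EXPR cases for the code above:
   the address of a local (auto) variable is always nonzero, while the
   address of a DECL_WEAK symbol is never assumed nonzero, since a weak
   symbol may resolve to NULL; other globals are assumed nonzero only
   under -fdelete-null-pointer-checks.  */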
15666
15667 /* Return true when T (such as an address) is known to be nonzero.
15668 For floating point we further ensure that T is not denormal.
15669 Similar logic is present in nonzero_address in rtlanal.c.
15670
15671 If the return value is based on the assumption that signed overflow
15672 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15673 change *STRICT_OVERFLOW_P. */
15674
15675 bool
15676 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15677 {
15678 tree type = TREE_TYPE (t);
15679 enum tree_code code;
15680
15681 /* Doing something useful for floating point would need more work. */
15682 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15683 return false;
15684
15685 code = TREE_CODE (t);
15686 switch (TREE_CODE_CLASS (code))
15687 {
15688 case tcc_unary:
15689 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15690 strict_overflow_p);
15691 case tcc_binary:
15692 case tcc_comparison:
15693 return tree_binary_nonzero_warnv_p (code, type,
15694 TREE_OPERAND (t, 0),
15695 TREE_OPERAND (t, 1),
15696 strict_overflow_p);
15697 case tcc_constant:
15698 case tcc_declaration:
15699 case tcc_reference:
15700 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15701
15702 default:
15703 break;
15704 }
15705
15706 switch (code)
15707 {
15708 case TRUTH_NOT_EXPR:
15709 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15710 strict_overflow_p);
15711
15712 case TRUTH_AND_EXPR:
15713 case TRUTH_OR_EXPR:
15714 case TRUTH_XOR_EXPR:
15715 return tree_binary_nonzero_warnv_p (code, type,
15716 TREE_OPERAND (t, 0),
15717 TREE_OPERAND (t, 1),
15718 strict_overflow_p);
15719
15720 case COND_EXPR:
15721 case CONSTRUCTOR:
15722 case OBJ_TYPE_REF:
15723 case ASSERT_EXPR:
15724 case ADDR_EXPR:
15725 case WITH_SIZE_EXPR:
15726 case SSA_NAME:
15727 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15728
15729 case COMPOUND_EXPR:
15730 case MODIFY_EXPR:
15731 case BIND_EXPR:
15732 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15733 strict_overflow_p);
15734
15735 case SAVE_EXPR:
15736 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15737 strict_overflow_p);
15738
15739 case CALL_EXPR:
15740 return alloca_call_p (t);
15741
15742 default:
15743 break;
15744 }
15745 return false;
15746 }
15747
15748 /* Return true when T (such as an address) is known to be nonzero.
15749 Handle warnings about undefined signed overflow. */
15750
15751 bool
15752 tree_expr_nonzero_p (tree t)
15753 {
15754 bool ret, strict_overflow_p;
15755
15756 strict_overflow_p = false;
15757 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15758 if (strict_overflow_p)
15759 fold_overflow_warning (("assuming signed overflow does not occur when "
15760 "determining that expression is always "
15761 "non-zero"),
15762 WARN_STRICT_OVERFLOW_MISC);
15763 return ret;
15764 }
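
/* A minimal hypothetical use -- fold the comparison P != 0 to true:

     if (tree_expr_nonzero_p (p))
       return constant_boolean_node (true, type);

   constant_boolean_node is the same helper this file uses for other
   boolean-valued folds.  */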
15765
15766 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15767 attempt to fold the expression to a constant without modifying TYPE,
15768 OP0 or OP1.
15769
15770 If the expression can be simplified to a constant, return
15771 the constant. If the expression cannot be simplified to a
15772 constant, return NULL_TREE. */
15773
15774 tree
15775 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15776 {
15777 tree tem = fold_binary (code, type, op0, op1);
15778 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15779 }
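
/* For example (with hypothetical constant operands):

     fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                              build_int_cst (integer_type_node, 2),
                              build_int_cst (integer_type_node, 3))

   yields the INTEGER_CST 5, while the same call with a VAR_DECL as
   one operand yields NULL_TREE.  */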
15780
15781 /* Given the components of a unary expression CODE, TYPE and OP0,
15782 attempt to fold the expression to a constant without modifying
15783 TYPE or OP0.
15784
15785 If the expression can be simplified to a constant, return
15786 the constant. If the expression cannot be simplified to a
15787 constant, return NULL_TREE. */
15788
15789 tree
15790 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15791 {
15792 tree tem = fold_unary (code, type, op0);
15793 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15794 }
15795
15796 /* If EXP represents referencing an element in a constant string
15797 (either via pointer arithmetic or array indexing), return the
15798 tree representing the value accessed, otherwise return NULL. */
15799
15800 tree
15801 fold_read_from_constant_string (tree exp)
15802 {
15803 if ((TREE_CODE (exp) == INDIRECT_REF
15804 || TREE_CODE (exp) == ARRAY_REF)
15805 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15806 {
15807 tree exp1 = TREE_OPERAND (exp, 0);
15808 tree index;
15809 tree string;
15810 location_t loc = EXPR_LOCATION (exp);
15811
15812 if (TREE_CODE (exp) == INDIRECT_REF)
15813 string = string_constant (exp1, &index);
15814 else
15815 {
15816 tree low_bound = array_ref_low_bound (exp);
15817 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15818
15819 /* Optimize the special-case of a zero lower bound.
15820
15821 We convert the low_bound to sizetype to avoid some problems
15822 with constant folding. (E.g. suppose the lower bound is 1,
15823 and its mode is QI. Without the conversion, (ARRAY
15824 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15825 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15826 if (! integer_zerop (low_bound))
15827 index = size_diffop_loc (loc, index,
15828 fold_convert_loc (loc, sizetype, low_bound));
15829
15830 string = exp1;
15831 }
15832
15833 if (string
15834 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15835 && TREE_CODE (string) == STRING_CST
15836 && TREE_CODE (index) == INTEGER_CST
15837 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15838 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15839 == MODE_INT)
15840 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15841 return build_int_cst_type (TREE_TYPE (exp),
15842 (TREE_STRING_POINTER (string)
15843 [TREE_INT_CST_LOW (index)]));
15844 }
15845 return NULL;
15846 }
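
/* For instance, a hypothetical reference "abc"[1] passes every check
   above -- the index is a constant below TREE_STRING_LENGTH and the
   element type has a one-byte MODE_INT mode -- so it folds to the
   character constant 'b' via build_int_cst_type.  */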
15847
15848 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15849 an integer constant, real, or fixed-point constant.
15850
15851 TYPE is the type of the result. */
15852
15853 static tree
15854 fold_negate_const (tree arg0, tree type)
15855 {
15856 tree t = NULL_TREE;
15857
15858 switch (TREE_CODE (arg0))
15859 {
15860 case INTEGER_CST:
15861 {
15862 double_int val = tree_to_double_int (arg0);
15863 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15864
15865 t = force_fit_type_double (type, val, 1,
15866 (overflow | TREE_OVERFLOW (arg0))
15867 && !TYPE_UNSIGNED (type));
15868 break;
15869 }
15870
15871 case REAL_CST:
15872 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15873 break;
15874
15875 case FIXED_CST:
15876 {
15877 FIXED_VALUE_TYPE f;
15878 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15879 &(TREE_FIXED_CST (arg0)), NULL,
15880 TYPE_SATURATING (type));
15881 t = build_fixed (type, f);
15882 /* Propagate overflow flags. */
15883 if (overflow_p | TREE_OVERFLOW (arg0))
15884 TREE_OVERFLOW (t) = 1;
15885 break;
15886 }
15887
15888 default:
15889 gcc_unreachable ();
15890 }
15891
15892 return t;
15893 }
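
/* A worked INTEGER_CST case: negating INT_MIN in a signed 32-bit type
   wraps back to INT_MIN, so neg_double reports overflow and the result
   carries TREE_OVERFLOW; for an unsigned type the wrap-around is well
   defined and no overflow flag is set.  */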
15894
15895 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15896 an integer constant or real constant.
15897
15898 TYPE is the type of the result. */
15899
15900 tree
15901 fold_abs_const (tree arg0, tree type)
15902 {
15903 tree t = NULL_TREE;
15904
15905 switch (TREE_CODE (arg0))
15906 {
15907 case INTEGER_CST:
15908 {
15909 double_int val = tree_to_double_int (arg0);
15910
15911 /* If the value is unsigned or non-negative, then the absolute value
15912 is the same as the ordinary value. */
15913 if (TYPE_UNSIGNED (type)
15914 || !double_int_negative_p (val))
15915 t = arg0;
15916
15917 /* If the value is negative, then the absolute value is
15918 its negation. */
15919 else
15920 {
15921 int overflow;
15922
15923 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15924 t = force_fit_type_double (type, val, -1,
15925 overflow | TREE_OVERFLOW (arg0));
15926 }
15927 }
15928 break;
15929
15930 case REAL_CST:
15931 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15932 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15933 else
15934 t = arg0;
15935 break;
15936
15937 default:
15938 gcc_unreachable ();
15939 }
15940
15941 return t;
15942 }
15943
15944 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15945 constant. TYPE is the type of the result. */
15946
15947 static tree
15948 fold_not_const (const_tree arg0, tree type)
15949 {
15950 double_int val;
15951
15952 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15953
15954 val = double_int_not (tree_to_double_int (arg0));
15955 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15956 }
15957
15958 /* Given CODE, a relational operator, the target type TYPE, and two
15959 constant operands OP0 and OP1, return the result of the
15960 relational operation. If the result is not a compile time
15961 constant, then return NULL_TREE. */
15962
15963 static tree
15964 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15965 {
15966 int result, invert;
15967
15968 /* From here on, the only cases we handle are when the result is
15969 known to be a constant. */
15970
15971 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15972 {
15973 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15974 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15975
15976 /* Handle the cases where either operand is a NaN. */
15977 if (real_isnan (c0) || real_isnan (c1))
15978 {
15979 switch (code)
15980 {
15981 case EQ_EXPR:
15982 case ORDERED_EXPR:
15983 result = 0;
15984 break;
15985
15986 case NE_EXPR:
15987 case UNORDERED_EXPR:
15988 case UNLT_EXPR:
15989 case UNLE_EXPR:
15990 case UNGT_EXPR:
15991 case UNGE_EXPR:
15992 case UNEQ_EXPR:
15993 result = 1;
15994 break;
15995
15996 case LT_EXPR:
15997 case LE_EXPR:
15998 case GT_EXPR:
15999 case GE_EXPR:
16000 case LTGT_EXPR:
16001 if (flag_trapping_math)
16002 return NULL_TREE;
16003 result = 0;
16004 break;
16005
16006 default:
16007 gcc_unreachable ();
16008 }
16009
16010 return constant_boolean_node (result, type);
16011 }
16012
16013 return constant_boolean_node (real_compare (code, c0, c1), type);
16014 }
16015
16016 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16017 {
16018 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16019 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16020 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16021 }
16022
16023 /* Handle equality/inequality of complex constants. */
16024 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16025 {
16026 tree rcond = fold_relational_const (code, type,
16027 TREE_REALPART (op0),
16028 TREE_REALPART (op1));
16029 tree icond = fold_relational_const (code, type,
16030 TREE_IMAGPART (op0),
16031 TREE_IMAGPART (op1));
16032 if (code == EQ_EXPR)
16033 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16034 else if (code == NE_EXPR)
16035 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16036 else
16037 return NULL_TREE;
16038 }
16039
16040 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16041
16042 To compute GT, swap the arguments and do LT.
16043 To compute GE, do LT and invert the result.
16044 To compute LE, swap the arguments, do LT and invert the result.
16045 To compute NE, do EQ and invert the result.
16046
16047 Therefore, the code below must handle only EQ and LT. */
16048
16049 if (code == LE_EXPR || code == GT_EXPR)
16050 {
16051 tree tem = op0;
16052 op0 = op1;
16053 op1 = tem;
16054 code = swap_tree_comparison (code);
16055 }
16056
16057 /* Note that it is safe to invert for real values here because we
16058 have already handled the one case where it matters. */
16059
16060 invert = 0;
16061 if (code == NE_EXPR || code == GE_EXPR)
16062 {
16063 invert = 1;
16064 code = invert_tree_comparison (code, false);
16065 }
16066
16067 /* Compute a result for LT or EQ if args permit;
16068 otherwise return NULL_TREE. */
16069 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16070 {
16071 if (code == EQ_EXPR)
16072 result = tree_int_cst_equal (op0, op1);
16073 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16074 result = INT_CST_LT_UNSIGNED (op0, op1);
16075 else
16076 result = INT_CST_LT (op0, op1);
16077 }
16078 else
16079 return NULL_TREE;
16080
16081 if (invert)
16082 result ^= 1;
16083 return constant_boolean_node (result, type);
16084 }
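
/* Two illustrative REAL_CST folds under the NaN handling above:
   NaN == NaN yields false and NaN != NaN yields true, per IEEE
   semantics, while an ordering test such as NaN < 1.0 folds to false
   only when !flag_trapping_math, since folding would otherwise hide
   the invalid-operand exception raised at run time.  */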
16085
16086 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16087 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16088 itself. */
16089
16090 tree
16091 fold_build_cleanup_point_expr (tree type, tree expr)
16092 {
16093 /* If the expression does not have side effects then we don't have to wrap
16094 it with a cleanup point expression. */
16095 if (!TREE_SIDE_EFFECTS (expr))
16096 return expr;
16097
16098 /* If the expression is a return, check whether the expression inside the
16099 return, or the right hand side of the modify expression inside the
16100 return, has no side effects. If either lacks side effects, we don't
16101 need to wrap the expression in a cleanup point expression. Note we don't
16102 check the left hand side of the modify because it should always be a return decl. */
16103 if (TREE_CODE (expr) == RETURN_EXPR)
16104 {
16105 tree op = TREE_OPERAND (expr, 0);
16106 if (!op || !TREE_SIDE_EFFECTS (op))
16107 return expr;
16108 op = TREE_OPERAND (op, 1);
16109 if (!TREE_SIDE_EFFECTS (op))
16110 return expr;
16111 }
16112
16113 return build1 (CLEANUP_POINT_EXPR, type, expr);
16114 }
16115
16116 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16117 of an indirection through OP0, or NULL_TREE if no simplification is
16118 possible. */
16119
16120 tree
16121 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16122 {
16123 tree sub = op0;
16124 tree subtype;
16125
16126 STRIP_NOPS (sub);
16127 subtype = TREE_TYPE (sub);
16128 if (!POINTER_TYPE_P (subtype))
16129 return NULL_TREE;
16130
16131 if (TREE_CODE (sub) == ADDR_EXPR)
16132 {
16133 tree op = TREE_OPERAND (sub, 0);
16134 tree optype = TREE_TYPE (op);
16135 /* *&CONST_DECL -> the value of the const decl. */
16136 if (TREE_CODE (op) == CONST_DECL)
16137 return DECL_INITIAL (op);
16138 /* *&p => p; make sure to handle *&"str"[cst] here. */
16139 if (type == optype)
16140 {
16141 tree fop = fold_read_from_constant_string (op);
16142 if (fop)
16143 return fop;
16144 else
16145 return op;
16146 }
16147 /* *(foo *)&fooarray => fooarray[0] */
16148 else if (TREE_CODE (optype) == ARRAY_TYPE
16149 && type == TREE_TYPE (optype)
16150 && (!in_gimple_form
16151 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16152 {
16153 tree type_domain = TYPE_DOMAIN (optype);
16154 tree min_val = size_zero_node;
16155 if (type_domain && TYPE_MIN_VALUE (type_domain))
16156 min_val = TYPE_MIN_VALUE (type_domain);
16157 if (in_gimple_form
16158 && TREE_CODE (min_val) != INTEGER_CST)
16159 return NULL_TREE;
16160 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16161 NULL_TREE, NULL_TREE);
16162 }
16163 /* *(foo *)&complexfoo => __real__ complexfoo */
16164 else if (TREE_CODE (optype) == COMPLEX_TYPE
16165 && type == TREE_TYPE (optype))
16166 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16167 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16168 else if (TREE_CODE (optype) == VECTOR_TYPE
16169 && type == TREE_TYPE (optype))
16170 {
16171 tree part_width = TYPE_SIZE (type);
16172 tree index = bitsize_int (0);
16173 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16174 }
16175 }
16176
16177 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16178 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16179 {
16180 tree op00 = TREE_OPERAND (sub, 0);
16181 tree op01 = TREE_OPERAND (sub, 1);
16182
16183 STRIP_NOPS (op00);
16184 if (TREE_CODE (op00) == ADDR_EXPR)
16185 {
16186 tree op00type;
16187 op00 = TREE_OPERAND (op00, 0);
16188 op00type = TREE_TYPE (op00);
16189
16190 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16191 if (TREE_CODE (op00type) == VECTOR_TYPE
16192 && type == TREE_TYPE (op00type))
16193 {
16194 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16195 tree part_width = TYPE_SIZE (type);
16196 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16197 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16198 tree index = bitsize_int (indexi);
16199
16200 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16201 return fold_build3_loc (loc,
16202 BIT_FIELD_REF, type, op00,
16203 part_width, index);
16204
16205 }
16206 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16207 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16208 && type == TREE_TYPE (op00type))
16209 {
16210 tree size = TYPE_SIZE_UNIT (type);
16211 if (tree_int_cst_equal (size, op01))
16212 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16213 }
16214 /* ((foo *)&fooarray)[1] => fooarray[1] */
16215 else if (TREE_CODE (op00type) == ARRAY_TYPE
16216 && type == TREE_TYPE (op00type))
16217 {
16218 tree type_domain = TYPE_DOMAIN (op00type);
16219 tree min_val = size_zero_node;
16220 if (type_domain && TYPE_MIN_VALUE (type_domain))
16221 min_val = TYPE_MIN_VALUE (type_domain);
16222 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16223 TYPE_SIZE_UNIT (type));
16224 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16225 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16226 NULL_TREE, NULL_TREE);
16227 }
16228 }
16229 }
16230
16231 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16232 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16233 && type == TREE_TYPE (TREE_TYPE (subtype))
16234 && (!in_gimple_form
16235 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16236 {
16237 tree type_domain;
16238 tree min_val = size_zero_node;
16239 sub = build_fold_indirect_ref_loc (loc, sub);
16240 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16241 if (type_domain && TYPE_MIN_VALUE (type_domain))
16242 min_val = TYPE_MIN_VALUE (type_domain);
16243 if (in_gimple_form
16244 && TREE_CODE (min_val) != INTEGER_CST)
16245 return NULL_TREE;
16246 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16247 NULL_TREE);
16248 }
16249
16250 return NULL_TREE;
16251 }
16252
16253 /* Builds an expression for an indirection through T, simplifying some
16254 cases. */
16255
16256 tree
16257 build_fold_indirect_ref_loc (location_t loc, tree t)
16258 {
16259 tree type = TREE_TYPE (TREE_TYPE (t));
16260 tree sub = fold_indirect_ref_1 (loc, type, t);
16261
16262 if (sub)
16263 return sub;
16264
16265 return build1_loc (loc, INDIRECT_REF, type, t);
16266 }
16267
16268 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16269
16270 tree
16271 fold_indirect_ref_loc (location_t loc, tree t)
16272 {
16273 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16274
16275 if (sub)
16276 return sub;
16277 else
16278 return t;
16279 }
16280
16281 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16282 whose result is ignored. The type of the returned tree need not be
16283 the same as the original expression. */
16284
16285 tree
16286 fold_ignored_result (tree t)
16287 {
16288 if (!TREE_SIDE_EFFECTS (t))
16289 return integer_zero_node;
16290
16291 for (;;)
16292 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16293 {
16294 case tcc_unary:
16295 t = TREE_OPERAND (t, 0);
16296 break;
16297
16298 case tcc_binary:
16299 case tcc_comparison:
16300 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16301 t = TREE_OPERAND (t, 0);
16302 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16303 t = TREE_OPERAND (t, 1);
16304 else
16305 return t;
16306 break;
16307
16308 case tcc_expression:
16309 switch (TREE_CODE (t))
16310 {
16311 case COMPOUND_EXPR:
16312 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16313 return t;
16314 t = TREE_OPERAND (t, 0);
16315 break;
16316
16317 case COND_EXPR:
16318 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16319 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16320 return t;
16321 t = TREE_OPERAND (t, 0);
16322 break;
16323
16324 default:
16325 return t;
16326 }
16327 break;
16328
16329 default:
16330 return t;
16331 }
16332 }
16333
16334 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16335 This can only be applied to objects of a sizetype. */
16336
16337 tree
16338 round_up_loc (location_t loc, tree value, int divisor)
16339 {
16340 tree div = NULL_TREE;
16341
16342 gcc_assert (divisor > 0);
16343 if (divisor == 1)
16344 return value;
16345
16346 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16347 have to do anything. Only do this when VALUE is not a constant,
16348 because for a constant the check is more expensive than simply
16349 performing the rounding. */
16350 if (TREE_CODE (value) != INTEGER_CST)
16351 {
16352 div = build_int_cst (TREE_TYPE (value), divisor);
16353
16354 if (multiple_of_p (TREE_TYPE (value), value, div))
16355 return value;
16356 }
16357
16358 /* If divisor is a power of two, simplify this to bit manipulation. */
16359 if (divisor == (divisor & -divisor))
16360 {
16361 if (TREE_CODE (value) == INTEGER_CST)
16362 {
16363 double_int val = tree_to_double_int (value);
16364 bool overflow_p;
16365
16366 if ((val.low & (divisor - 1)) == 0)
16367 return value;
16368
16369 overflow_p = TREE_OVERFLOW (value);
16370 val.low &= ~(divisor - 1);
16371 val.low += divisor;
16372 if (val.low == 0)
16373 {
16374 val.high++;
16375 if (val.high == 0)
16376 overflow_p = true;
16377 }
16378
16379 return force_fit_type_double (TREE_TYPE (value), val,
16380 -1, overflow_p);
16381 }
16382 else
16383 {
16384 tree t;
16385
16386 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16387 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16388 t = build_int_cst (TREE_TYPE (value), -divisor);
16389 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16390 }
16391 }
16392 else
16393 {
16394 if (!div)
16395 div = build_int_cst (TREE_TYPE (value), divisor);
16396 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16397 value = size_binop_loc (loc, MULT_EXPR, value, div);
16398 }
16399
16400 return value;
16401 }
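
/* A worked power-of-two case: rounding up to a multiple of 8 becomes
   (VALUE + 7) & -8, so 23 rounds to 24 and 24 is unchanged; a divisor
   such as 12 is not a power of two and takes the
   CEIL_DIV_EXPR/MULT_EXPR path instead.  */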
16402
16403 /* Likewise, but round down. */
16404
16405 tree
16406 round_down_loc (location_t loc, tree value, int divisor)
16407 {
16408 tree div = NULL_TREE;
16409
16410 gcc_assert (divisor > 0);
16411 if (divisor == 1)
16412 return value;
16413
16414 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16415 have to do anything. Only do this when VALUE is not a constant,
16416 because for a constant the check is more expensive than simply
16417 performing the rounding. */
16418 if (TREE_CODE (value) != INTEGER_CST)
16419 {
16420 div = build_int_cst (TREE_TYPE (value), divisor);
16421
16422 if (multiple_of_p (TREE_TYPE (value), value, div))
16423 return value;
16424 }
16425
16426 /* If divisor is a power of two, simplify this to bit manipulation. */
16427 if (divisor == (divisor & -divisor))
16428 {
16429 tree t;
16430
16431 t = build_int_cst (TREE_TYPE (value), -divisor);
16432 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16433 }
16434 else
16435 {
16436 if (!div)
16437 div = build_int_cst (TREE_TYPE (value), divisor);
16438 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16439 value = size_binop_loc (loc, MULT_EXPR, value, div);
16440 }
16441
16442 return value;
16443 }
16444
16445 /* Returns a pointer to the base of the object addressed by EXP and
16446 extracts information about the offset of the access, storing it
16447 in *PBITPOS and *POFFSET. */
16448
16449 static tree
16450 split_address_to_core_and_offset (tree exp,
16451 HOST_WIDE_INT *pbitpos, tree *poffset)
16452 {
16453 tree core;
16454 enum machine_mode mode;
16455 int unsignedp, volatilep;
16456 HOST_WIDE_INT bitsize;
16457 location_t loc = EXPR_LOCATION (exp);
16458
16459 if (TREE_CODE (exp) == ADDR_EXPR)
16460 {
16461 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16462 poffset, &mode, &unsignedp, &volatilep,
16463 false);
16464 core = build_fold_addr_expr_loc (loc, core);
16465 }
16466 else
16467 {
16468 core = exp;
16469 *pbitpos = 0;
16470 *poffset = NULL_TREE;
16471 }
16472
16473 return core;
16474 }
16475
16476 /* Returns true if addresses of E1 and E2 differ by a constant, false
16477 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16478
16479 bool
16480 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16481 {
16482 tree core1, core2;
16483 HOST_WIDE_INT bitpos1, bitpos2;
16484 tree toffset1, toffset2, tdiff, type;
16485
16486 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16487 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16488
16489 if (bitpos1 % BITS_PER_UNIT != 0
16490 || bitpos2 % BITS_PER_UNIT != 0
16491 || !operand_equal_p (core1, core2, 0))
16492 return false;
16493
16494 if (toffset1 && toffset2)
16495 {
16496 type = TREE_TYPE (toffset1);
16497 if (type != TREE_TYPE (toffset2))
16498 toffset2 = fold_convert (type, toffset2);
16499
16500 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16501 if (!cst_and_fits_in_hwi (tdiff))
16502 return false;
16503
16504 *diff = int_cst_value (tdiff);
16505 }
16506 else if (toffset1 || toffset2)
16507 {
16508 /* If only one of the offsets is non-constant, the difference cannot
16509 be a constant. */
16510 return false;
16511 }
16512 else
16513 *diff = 0;
16514
16515 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16516 return true;
16517 }
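
/* For example, with a hypothetical array of 4-byte ints A,
     ptr_difference_const (&A[3], &A[1], &diff)
   finds the same core address with byte offsets 12 and 4 and stores 8
   in *diff; addresses with different cores fail operand_equal_p and
   the function returns false.  */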
16518
16519 /* Simplify the floating point expression EXP when the sign of the
16520 result is not significant. Return NULL_TREE if no simplification
16521 is possible. */
16522
16523 tree
16524 fold_strip_sign_ops (tree exp)
16525 {
16526 tree arg0, arg1;
16527 location_t loc = EXPR_LOCATION (exp);
16528
16529 switch (TREE_CODE (exp))
16530 {
16531 case ABS_EXPR:
16532 case NEGATE_EXPR:
16533 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16534 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16535
16536 case MULT_EXPR:
16537 case RDIV_EXPR:
16538 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16539 return NULL_TREE;
16540 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16541 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16542 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16543 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16544 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16545 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16546 break;
16547
16548 case COMPOUND_EXPR:
16549 arg0 = TREE_OPERAND (exp, 0);
16550 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16551 if (arg1)
16552 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16553 break;
16554
16555 case COND_EXPR:
16556 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16557 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16558 if (arg0 || arg1)
16559 return fold_build3_loc (loc,
16560 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16561 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16562 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16563 break;
16564
16565 case CALL_EXPR:
16566 {
16567 const enum built_in_function fcode = builtin_mathfn_code (exp);
16568 switch (fcode)
16569 {
16570 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16571 /* Strip copysign function call, return the 1st argument. */
16572 arg0 = CALL_EXPR_ARG (exp, 0);
16573 arg1 = CALL_EXPR_ARG (exp, 1);
16574 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16575
16576 default:
16577 /* Strip sign ops from the argument of "odd" math functions. */
16578 if (negate_mathfn_p (fcode))
16579 {
16580 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16581 if (arg0)
16582 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16583 }
16584 break;
16585 }
16586 }
16587 break;
16588
16589 default:
16590 break;
16591 }
16592 return NULL_TREE;
16593 }
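
/* A typical hypothetical use, as when builtins.c folds fabs: since
   fabs ((-x) * y) == fabs (x * y), calling

     fold_strip_sign_ops (mult_of_neg_x_and_y)

   returns x * y, letting the caller discard the inner negation.  */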