]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/fold-const.c
* tree.def (FIXED_POINT_TYPE): New type.
[thirdparty/gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
44 sets TREE_OVERFLOW.
45
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
49
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "flags.h"
55 #include "tree.h"
56 #include "real.h"
57 #include "fixed-value.h"
58 #include "rtl.h"
59 #include "expr.h"
60 #include "tm_p.h"
61 #include "toplev.h"
62 #include "intl.h"
63 #include "ggc.h"
64 #include "hashtab.h"
65 #include "langhooks.h"
66 #include "md5.h"
67
68 /* Nonzero if we are folding constants inside an initializer; zero
69 otherwise. */
70 int folding_initializer = 0;
71
72 /* The following constants represent a bit based encoding of GCC's
73 comparison operators. This encoding simplifies transformations
74 on relational comparison operators, such as AND and OR. */
75 enum comparison_code {
76 COMPCODE_FALSE = 0,
77 COMPCODE_LT = 1,
78 COMPCODE_EQ = 2,
79 COMPCODE_LE = 3,
80 COMPCODE_GT = 4,
81 COMPCODE_LTGT = 5,
82 COMPCODE_GE = 6,
83 COMPCODE_ORD = 7,
84 COMPCODE_UNORD = 8,
85 COMPCODE_UNLT = 9,
86 COMPCODE_UNEQ = 10,
87 COMPCODE_UNLE = 11,
88 COMPCODE_UNGT = 12,
89 COMPCODE_NE = 13,
90 COMPCODE_UNGE = 14,
91 COMPCODE_TRUE = 15
92 };
93
94 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
95 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
96 static bool negate_mathfn_p (enum built_in_function);
97 static bool negate_expr_p (tree);
98 static tree negate_expr (tree);
99 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
100 static tree associate_trees (tree, tree, enum tree_code, tree);
101 static tree const_binop (enum tree_code, tree, tree, int);
102 static enum comparison_code comparison_to_compcode (enum tree_code);
103 static enum tree_code compcode_to_comparison (enum comparison_code);
104 static tree combine_comparisons (enum tree_code, enum tree_code,
105 enum tree_code, tree, tree, tree);
106 static int truth_value_p (enum tree_code);
107 static int operand_equal_for_comparison_p (tree, tree, tree);
108 static int twoval_comparison_p (tree, tree *, tree *, int *);
109 static tree eval_subst (tree, tree, tree, tree, tree);
110 static tree pedantic_omit_one_operand (tree, tree, tree);
111 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
112 static tree make_bit_field_ref (tree, tree, int, int, int);
113 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
114 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
115 enum machine_mode *, int *, int *,
116 tree *, tree *);
117 static int all_ones_mask_p (tree, int);
118 static tree sign_bit_p (tree, tree);
119 static int simple_operand_p (tree);
120 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
121 static tree range_predecessor (tree);
122 static tree range_successor (tree);
123 static tree make_range (tree, int *, tree *, tree *, bool *);
124 static tree build_range_check (tree, tree, int, tree, tree);
125 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
126 tree);
127 static tree fold_range_test (enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree fold_truthop (enum tree_code, tree, tree, tree);
131 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
132 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
133 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
134 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
135 tree, tree,
136 tree, tree, int);
137 static bool fold_real_zero_addition_p (tree, tree, int);
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
139 tree, tree, tree);
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (tree, tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146
147
148 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
149 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
150 and SUM1. Then this yields nonzero if overflow occurred during the
151 addition.
152
153 Overflow occurs if A and B have the same sign, but A and SUM differ in
154 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
155 sign. */
156 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
157 \f
158 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
159 We do that by representing the two-word integer in 4 words, with only
160 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
161 number. The value of the word is LOWPART + HIGHPART * BASE. */
162
163 #define LOWPART(x) \
164 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
165 #define HIGHPART(x) \
166 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
167 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
168
169 /* Unpack a two-word integer into 4 words.
170 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
171 WORDS points to the array of HOST_WIDE_INTs. */
172
173 static void
174 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
175 {
176 words[0] = LOWPART (low);
177 words[1] = HIGHPART (low);
178 words[2] = LOWPART (hi);
179 words[3] = HIGHPART (hi);
180 }
181
182 /* Pack an array of 4 words into a two-word integer.
183 WORDS points to the array of words.
184 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
185
186 static void
187 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
188 HOST_WIDE_INT *hi)
189 {
190 *low = words[0] + words[1] * BASE;
191 *hi = words[2] + words[3] * BASE;
192 }
193 \f
194 /* Force the double-word integer L1, H1 to be within the range of the
195 integer type TYPE. Stores the properly truncated and sign-extended
196 double-word integer in *LV, *HV. Returns true if the operation
197 overflows, that is, argument and result are different. */
198
199 int
200 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
201 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
202 {
203 unsigned HOST_WIDE_INT low0 = l1;
204 HOST_WIDE_INT high0 = h1;
205 unsigned int prec;
206 int sign_extended_type;
207
208 if (POINTER_TYPE_P (type)
209 || TREE_CODE (type) == OFFSET_TYPE)
210 prec = POINTER_SIZE;
211 else
212 prec = TYPE_PRECISION (type);
213
214 /* Size types *are* sign extended. */
215 sign_extended_type = (!TYPE_UNSIGNED (type)
216 || (TREE_CODE (type) == INTEGER_TYPE
217 && TYPE_IS_SIZETYPE (type)));
218
219 /* First clear all bits that are beyond the type's precision. */
220 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
221 ;
222 else if (prec > HOST_BITS_PER_WIDE_INT)
223 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
224 else
225 {
226 h1 = 0;
227 if (prec < HOST_BITS_PER_WIDE_INT)
228 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
229 }
230
231 /* Then do sign extension if necessary. */
232 if (!sign_extended_type)
233 /* No sign extension */;
234 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
235 /* Correct width already. */;
236 else if (prec > HOST_BITS_PER_WIDE_INT)
237 {
238 /* Sign extend top half? */
239 if (h1 & ((unsigned HOST_WIDE_INT)1
240 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
241 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
242 }
243 else if (prec == HOST_BITS_PER_WIDE_INT)
244 {
245 if ((HOST_WIDE_INT)l1 < 0)
246 h1 = -1;
247 }
248 else
249 {
250 /* Sign extend bottom half? */
251 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
252 {
253 h1 = -1;
254 l1 |= (HOST_WIDE_INT)(-1) << prec;
255 }
256 }
257
258 *lv = l1;
259 *hv = h1;
260
261 /* If the value didn't fit, signal overflow. */
262 return l1 != low0 || h1 != high0;
263 }
264
265 /* We force the double-int HIGH:LOW to the range of the type TYPE by
266 sign or zero extending it.
267 OVERFLOWABLE indicates if we are interested
268 in overflow of the value, when >0 we are only interested in signed
269 overflow, for <0 we are interested in any overflow. OVERFLOWED
270 indicates whether overflow has already occurred. CONST_OVERFLOWED
271 indicates whether constant overflow has already occurred. We force
272 T's value to be within range of T's type (by setting to 0 or 1 all
273 the bits outside the type's range). We set TREE_OVERFLOWED if,
274 OVERFLOWED is nonzero,
275 or OVERFLOWABLE is >0 and signed overflow occurs
276 or OVERFLOWABLE is <0 and any overflow occurs
277 We return a new tree node for the extended double-int. The node
278 is shared if no overflow flags are set. */
279
280 tree
281 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
282 HOST_WIDE_INT high, int overflowable,
283 bool overflowed)
284 {
285 int sign_extended_type;
286 bool overflow;
287
288 /* Size types *are* sign extended. */
289 sign_extended_type = (!TYPE_UNSIGNED (type)
290 || (TREE_CODE (type) == INTEGER_TYPE
291 && TYPE_IS_SIZETYPE (type)));
292
293 overflow = fit_double_type (low, high, &low, &high, type);
294
295 /* If we need to set overflow flags, return a new unshared node. */
296 if (overflowed || overflow)
297 {
298 if (overflowed
299 || overflowable < 0
300 || (overflowable > 0 && sign_extended_type))
301 {
302 tree t = make_node (INTEGER_CST);
303 TREE_INT_CST_LOW (t) = low;
304 TREE_INT_CST_HIGH (t) = high;
305 TREE_TYPE (t) = type;
306 TREE_OVERFLOW (t) = 1;
307 return t;
308 }
309 }
310
311 /* Else build a shared node. */
312 return build_int_cst_wide (type, low, high);
313 }
314 \f
315 /* Add two doubleword integers with doubleword result.
316 Return nonzero if the operation overflows according to UNSIGNED_P.
317 Each argument is given as two `HOST_WIDE_INT' pieces.
318 One argument is L1 and H1; the other, L2 and H2.
319 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
320
321 int
322 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
323 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
324 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
325 bool unsigned_p)
326 {
327 unsigned HOST_WIDE_INT l;
328 HOST_WIDE_INT h;
329
330 l = l1 + l2;
331 h = h1 + h2 + (l < l1);
332
333 *lv = l;
334 *hv = h;
335
336 if (unsigned_p)
337 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
338 else
339 return OVERFLOW_SUM_SIGN (h1, h2, h);
340 }
341
342 /* Negate a doubleword integer with doubleword result.
343 Return nonzero if the operation overflows, assuming it's signed.
344 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
345 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
346
347 int
348 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
349 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
350 {
351 if (l1 == 0)
352 {
353 *lv = 0;
354 *hv = - h1;
355 return (*hv & h1) < 0;
356 }
357 else
358 {
359 *lv = -l1;
360 *hv = ~h1;
361 return 0;
362 }
363 }
364 \f
365 /* Multiply two doubleword integers with doubleword result.
366 Return nonzero if the operation overflows according to UNSIGNED_P.
367 Each argument is given as two `HOST_WIDE_INT' pieces.
368 One argument is L1 and H1; the other, L2 and H2.
369 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
370
371 int
372 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
373 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
374 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
375 bool unsigned_p)
376 {
377 HOST_WIDE_INT arg1[4];
378 HOST_WIDE_INT arg2[4];
379 HOST_WIDE_INT prod[4 * 2];
380 unsigned HOST_WIDE_INT carry;
381 int i, j, k;
382 unsigned HOST_WIDE_INT toplow, neglow;
383 HOST_WIDE_INT tophigh, neghigh;
384
385 encode (arg1, l1, h1);
386 encode (arg2, l2, h2);
387
388 memset (prod, 0, sizeof prod);
389
390 for (i = 0; i < 4; i++)
391 {
392 carry = 0;
393 for (j = 0; j < 4; j++)
394 {
395 k = i + j;
396 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
397 carry += arg1[i] * arg2[j];
398 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
399 carry += prod[k];
400 prod[k] = LOWPART (carry);
401 carry = HIGHPART (carry);
402 }
403 prod[i + 4] = carry;
404 }
405
406 decode (prod, lv, hv);
407 decode (prod + 4, &toplow, &tophigh);
408
409 /* Unsigned overflow is immediate. */
410 if (unsigned_p)
411 return (toplow | tophigh) != 0;
412
413 /* Check for signed overflow by calculating the signed representation of the
414 top half of the result; it should agree with the low half's sign bit. */
415 if (h1 < 0)
416 {
417 neg_double (l2, h2, &neglow, &neghigh);
418 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
419 }
420 if (h2 < 0)
421 {
422 neg_double (l1, h1, &neglow, &neghigh);
423 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
424 }
425 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
426 }
427 \f
428 /* Shift the doubleword integer in L1, H1 left by COUNT places
429 keeping only PREC bits of result.
430 Shift right if COUNT is negative.
431 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
432 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
433
434 void
435 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
436 HOST_WIDE_INT count, unsigned int prec,
437 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
438 {
439 unsigned HOST_WIDE_INT signmask;
440
441 if (count < 0)
442 {
443 rshift_double (l1, h1, -count, prec, lv, hv, arith);
444 return;
445 }
446
447 if (SHIFT_COUNT_TRUNCATED)
448 count %= prec;
449
450 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
451 {
452 /* Shifting by the host word size is undefined according to the
453 ANSI standard, so we must handle this as a special case. */
454 *hv = 0;
455 *lv = 0;
456 }
457 else if (count >= HOST_BITS_PER_WIDE_INT)
458 {
459 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
460 *lv = 0;
461 }
462 else
463 {
464 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
465 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
466 *lv = l1 << count;
467 }
468
469 /* Sign extend all bits that are beyond the precision. */
470
471 signmask = -((prec > HOST_BITS_PER_WIDE_INT
472 ? ((unsigned HOST_WIDE_INT) *hv
473 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
474 : (*lv >> (prec - 1))) & 1);
475
476 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
477 ;
478 else if (prec >= HOST_BITS_PER_WIDE_INT)
479 {
480 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
481 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
482 }
483 else
484 {
485 *hv = signmask;
486 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
487 *lv |= signmask << prec;
488 }
489 }
490
491 /* Shift the doubleword integer in L1, H1 right by COUNT places
492 keeping only PREC bits of result. COUNT must be positive.
493 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
494 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
495
496 void
497 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
498 HOST_WIDE_INT count, unsigned int prec,
499 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
500 int arith)
501 {
502 unsigned HOST_WIDE_INT signmask;
503
504 signmask = (arith
505 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
506 : 0);
507
508 if (SHIFT_COUNT_TRUNCATED)
509 count %= prec;
510
511 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
512 {
513 /* Shifting by the host word size is undefined according to the
514 ANSI standard, so we must handle this as a special case. */
515 *hv = 0;
516 *lv = 0;
517 }
518 else if (count >= HOST_BITS_PER_WIDE_INT)
519 {
520 *hv = 0;
521 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
522 }
523 else
524 {
525 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
526 *lv = ((l1 >> count)
527 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
528 }
529
530 /* Zero / sign extend all bits that are beyond the precision. */
531
532 if (count >= (HOST_WIDE_INT)prec)
533 {
534 *hv = signmask;
535 *lv = signmask;
536 }
537 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
538 ;
539 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
540 {
541 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
542 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
543 }
544 else
545 {
546 *hv = signmask;
547 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
548 *lv |= signmask << (prec - count);
549 }
550 }
551 \f
552 /* Rotate the doubleword integer in L1, H1 left by COUNT places
553 keeping only PREC bits of result.
554 Rotate right if COUNT is negative.
555 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
556
557 void
558 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
559 HOST_WIDE_INT count, unsigned int prec,
560 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
561 {
562 unsigned HOST_WIDE_INT s1l, s2l;
563 HOST_WIDE_INT s1h, s2h;
564
565 count %= prec;
566 if (count < 0)
567 count += prec;
568
569 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
570 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
571 *lv = s1l | s2l;
572 *hv = s1h | s2h;
573 }
574
575 /* Rotate the doubleword integer in L1, H1 left by COUNT places
576 keeping only PREC bits of result. COUNT must be positive.
577 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
578
579 void
580 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
581 HOST_WIDE_INT count, unsigned int prec,
582 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
583 {
584 unsigned HOST_WIDE_INT s1l, s2l;
585 HOST_WIDE_INT s1h, s2h;
586
587 count %= prec;
588 if (count < 0)
589 count += prec;
590
591 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
592 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
593 *lv = s1l | s2l;
594 *hv = s1h | s2h;
595 }
596 \f
597 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
598 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
599 CODE is a tree code for a kind of division, one of
600 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
601 or EXACT_DIV_EXPR
602 It controls how the quotient is rounded to an integer.
603 Return nonzero if the operation overflows.
604 UNS nonzero says do unsigned division. */
605
606 int
607 div_and_round_double (enum tree_code code, int uns,
608 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
609 HOST_WIDE_INT hnum_orig,
610 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
611 HOST_WIDE_INT hden_orig,
612 unsigned HOST_WIDE_INT *lquo,
613 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
614 HOST_WIDE_INT *hrem)
615 {
616 int quo_neg = 0;
617 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
618 HOST_WIDE_INT den[4], quo[4];
619 int i, j;
620 unsigned HOST_WIDE_INT work;
621 unsigned HOST_WIDE_INT carry = 0;
622 unsigned HOST_WIDE_INT lnum = lnum_orig;
623 HOST_WIDE_INT hnum = hnum_orig;
624 unsigned HOST_WIDE_INT lden = lden_orig;
625 HOST_WIDE_INT hden = hden_orig;
626 int overflow = 0;
627
628 if (hden == 0 && lden == 0)
629 overflow = 1, lden = 1;
630
631 /* Calculate quotient sign and convert operands to unsigned. */
632 if (!uns)
633 {
634 if (hnum < 0)
635 {
636 quo_neg = ~ quo_neg;
637 /* (minimum integer) / (-1) is the only overflow case. */
638 if (neg_double (lnum, hnum, &lnum, &hnum)
639 && ((HOST_WIDE_INT) lden & hden) == -1)
640 overflow = 1;
641 }
642 if (hden < 0)
643 {
644 quo_neg = ~ quo_neg;
645 neg_double (lden, hden, &lden, &hden);
646 }
647 }
648
649 if (hnum == 0 && hden == 0)
650 { /* single precision */
651 *hquo = *hrem = 0;
652 /* This unsigned division rounds toward zero. */
653 *lquo = lnum / lden;
654 goto finish_up;
655 }
656
657 if (hnum == 0)
658 { /* trivial case: dividend < divisor */
659 /* hden != 0 already checked. */
660 *hquo = *lquo = 0;
661 *hrem = hnum;
662 *lrem = lnum;
663 goto finish_up;
664 }
665
666 memset (quo, 0, sizeof quo);
667
668 memset (num, 0, sizeof num); /* to zero 9th element */
669 memset (den, 0, sizeof den);
670
671 encode (num, lnum, hnum);
672 encode (den, lden, hden);
673
674 /* Special code for when the divisor < BASE. */
675 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
676 {
677 /* hnum != 0 already checked. */
678 for (i = 4 - 1; i >= 0; i--)
679 {
680 work = num[i] + carry * BASE;
681 quo[i] = work / lden;
682 carry = work % lden;
683 }
684 }
685 else
686 {
687 /* Full double precision division,
688 with thanks to Don Knuth's "Seminumerical Algorithms". */
689 int num_hi_sig, den_hi_sig;
690 unsigned HOST_WIDE_INT quo_est, scale;
691
692 /* Find the highest nonzero divisor digit. */
693 for (i = 4 - 1;; i--)
694 if (den[i] != 0)
695 {
696 den_hi_sig = i;
697 break;
698 }
699
700 /* Insure that the first digit of the divisor is at least BASE/2.
701 This is required by the quotient digit estimation algorithm. */
702
703 scale = BASE / (den[den_hi_sig] + 1);
704 if (scale > 1)
705 { /* scale divisor and dividend */
706 carry = 0;
707 for (i = 0; i <= 4 - 1; i++)
708 {
709 work = (num[i] * scale) + carry;
710 num[i] = LOWPART (work);
711 carry = HIGHPART (work);
712 }
713
714 num[4] = carry;
715 carry = 0;
716 for (i = 0; i <= 4 - 1; i++)
717 {
718 work = (den[i] * scale) + carry;
719 den[i] = LOWPART (work);
720 carry = HIGHPART (work);
721 if (den[i] != 0) den_hi_sig = i;
722 }
723 }
724
725 num_hi_sig = 4;
726
727 /* Main loop */
728 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
729 {
730 /* Guess the next quotient digit, quo_est, by dividing the first
731 two remaining dividend digits by the high order quotient digit.
732 quo_est is never low and is at most 2 high. */
733 unsigned HOST_WIDE_INT tmp;
734
735 num_hi_sig = i + den_hi_sig + 1;
736 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
737 if (num[num_hi_sig] != den[den_hi_sig])
738 quo_est = work / den[den_hi_sig];
739 else
740 quo_est = BASE - 1;
741
742 /* Refine quo_est so it's usually correct, and at most one high. */
743 tmp = work - quo_est * den[den_hi_sig];
744 if (tmp < BASE
745 && (den[den_hi_sig - 1] * quo_est
746 > (tmp * BASE + num[num_hi_sig - 2])))
747 quo_est--;
748
749 /* Try QUO_EST as the quotient digit, by multiplying the
750 divisor by QUO_EST and subtracting from the remaining dividend.
751 Keep in mind that QUO_EST is the I - 1st digit. */
752
753 carry = 0;
754 for (j = 0; j <= den_hi_sig; j++)
755 {
756 work = quo_est * den[j] + carry;
757 carry = HIGHPART (work);
758 work = num[i + j] - LOWPART (work);
759 num[i + j] = LOWPART (work);
760 carry += HIGHPART (work) != 0;
761 }
762
763 /* If quo_est was high by one, then num[i] went negative and
764 we need to correct things. */
765 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
766 {
767 quo_est--;
768 carry = 0; /* add divisor back in */
769 for (j = 0; j <= den_hi_sig; j++)
770 {
771 work = num[i + j] + den[j] + carry;
772 carry = HIGHPART (work);
773 num[i + j] = LOWPART (work);
774 }
775
776 num [num_hi_sig] += carry;
777 }
778
779 /* Store the quotient digit. */
780 quo[i] = quo_est;
781 }
782 }
783
784 decode (quo, lquo, hquo);
785
786 finish_up:
787 /* If result is negative, make it so. */
788 if (quo_neg)
789 neg_double (*lquo, *hquo, lquo, hquo);
790
791 /* Compute trial remainder: rem = num - (quo * den) */
792 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
793 neg_double (*lrem, *hrem, lrem, hrem);
794 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
795
796 switch (code)
797 {
798 case TRUNC_DIV_EXPR:
799 case TRUNC_MOD_EXPR: /* round toward zero */
800 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
801 return overflow;
802
803 case FLOOR_DIV_EXPR:
804 case FLOOR_MOD_EXPR: /* round toward negative infinity */
805 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
806 {
807 /* quo = quo - 1; */
808 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
809 lquo, hquo);
810 }
811 else
812 return overflow;
813 break;
814
815 case CEIL_DIV_EXPR:
816 case CEIL_MOD_EXPR: /* round toward positive infinity */
817 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
818 {
819 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
820 lquo, hquo);
821 }
822 else
823 return overflow;
824 break;
825
826 case ROUND_DIV_EXPR:
827 case ROUND_MOD_EXPR: /* round to closest integer */
828 {
829 unsigned HOST_WIDE_INT labs_rem = *lrem;
830 HOST_WIDE_INT habs_rem = *hrem;
831 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
832 HOST_WIDE_INT habs_den = hden, htwice;
833
834 /* Get absolute values. */
835 if (*hrem < 0)
836 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
837 if (hden < 0)
838 neg_double (lden, hden, &labs_den, &habs_den);
839
840 /* If (2 * abs (lrem) >= abs (lden)) */
841 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
842 labs_rem, habs_rem, &ltwice, &htwice);
843
844 if (((unsigned HOST_WIDE_INT) habs_den
845 < (unsigned HOST_WIDE_INT) htwice)
846 || (((unsigned HOST_WIDE_INT) habs_den
847 == (unsigned HOST_WIDE_INT) htwice)
848 && (labs_den < ltwice)))
849 {
850 if (*hquo < 0)
851 /* quo = quo - 1; */
852 add_double (*lquo, *hquo,
853 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
854 else
855 /* quo = quo + 1; */
856 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
857 lquo, hquo);
858 }
859 else
860 return overflow;
861 }
862 break;
863
864 default:
865 gcc_unreachable ();
866 }
867
868 /* Compute true remainder: rem = num - (quo * den) */
869 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
870 neg_double (*lrem, *hrem, lrem, hrem);
871 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
872 return overflow;
873 }
874
875 /* If ARG2 divides ARG1 with zero remainder, carries out the division
876 of type CODE and returns the quotient.
877 Otherwise returns NULL_TREE. */
878
879 static tree
880 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
881 {
882 unsigned HOST_WIDE_INT int1l, int2l;
883 HOST_WIDE_INT int1h, int2h;
884 unsigned HOST_WIDE_INT quol, reml;
885 HOST_WIDE_INT quoh, remh;
886 tree type = TREE_TYPE (arg1);
887 int uns = TYPE_UNSIGNED (type);
888
889 int1l = TREE_INT_CST_LOW (arg1);
890 int1h = TREE_INT_CST_HIGH (arg1);
891 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
892 &obj[some_exotic_number]. */
893 if (POINTER_TYPE_P (type))
894 {
895 uns = false;
896 type = signed_type_for (type);
897 fit_double_type (int1l, int1h, &int1l, &int1h,
898 type);
899 }
900 else
901 fit_double_type (int1l, int1h, &int1l, &int1h, type);
902 int2l = TREE_INT_CST_LOW (arg2);
903 int2h = TREE_INT_CST_HIGH (arg2);
904
905 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
906 &quol, &quoh, &reml, &remh);
907 if (remh != 0 || reml != 0)
908 return NULL_TREE;
909
910 return build_int_cst_wide (type, quol, quoh);
911 }
912 \f
913 /* This is nonzero if we should defer warnings about undefined
914 overflow. This facility exists because these warnings are a
915 special case. The code to estimate loop iterations does not want
916 to issue any warnings, since it works with expressions which do not
917 occur in user code. Various bits of cleanup code call fold(), but
918 only use the result if it has certain characteristics (e.g., is a
919 constant); that code only wants to issue a warning if the result is
920 used. */
921
922 static int fold_deferring_overflow_warnings;
923
924 /* If a warning about undefined overflow is deferred, this is the
925 warning. Note that this may cause us to turn two warnings into
926 one, but that is fine since it is sufficient to only give one
927 warning per expression. */
928
929 static const char* fold_deferred_overflow_warning;
930
931 /* If a warning about undefined overflow is deferred, this is the
932 level at which the warning should be emitted. */
933
934 static enum warn_strict_overflow_code fold_deferred_overflow_code;
935
936 /* Start deferring overflow warnings. We could use a stack here to
937 permit nested calls, but at present it is not necessary. */
938
939 void
940 fold_defer_overflow_warnings (void)
941 {
942 ++fold_deferring_overflow_warnings;
943 }
944
945 /* Stop deferring overflow warnings. If there is a pending warning,
946 and ISSUE is true, then issue the warning if appropriate. STMT is
947 the statement with which the warning should be associated (used for
948 location information); STMT may be NULL. CODE is the level of the
949 warning--a warn_strict_overflow_code value. This function will use
950 the smaller of CODE and the deferred code when deciding whether to
951 issue the warning. CODE may be zero to mean to always use the
952 deferred code. */
953
954 void
955 fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
956 {
957 const char *warnmsg;
958 location_t locus;
959
960 gcc_assert (fold_deferring_overflow_warnings > 0);
961 --fold_deferring_overflow_warnings;
962 if (fold_deferring_overflow_warnings > 0)
963 {
964 if (fold_deferred_overflow_warning != NULL
965 && code != 0
966 && code < (int) fold_deferred_overflow_code)
967 fold_deferred_overflow_code = code;
968 return;
969 }
970
971 warnmsg = fold_deferred_overflow_warning;
972 fold_deferred_overflow_warning = NULL;
973
974 if (!issue || warnmsg == NULL)
975 return;
976
977 /* Use the smallest code level when deciding to issue the
978 warning. */
979 if (code == 0 || code > (int) fold_deferred_overflow_code)
980 code = fold_deferred_overflow_code;
981
982 if (!issue_strict_overflow_warning (code))
983 return;
984
985 if (stmt == NULL_TREE || !expr_has_location (stmt))
986 locus = input_location;
987 else
988 locus = expr_location (stmt);
989 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
990 }
991
992 /* Stop deferring overflow warnings, ignoring any deferred
993 warnings. */
994
995 void
996 fold_undefer_and_ignore_overflow_warnings (void)
997 {
998 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
999 }
1000
1001 /* Whether we are deferring overflow warnings. */
1002
1003 bool
1004 fold_deferring_overflow_warnings_p (void)
1005 {
1006 return fold_deferring_overflow_warnings > 0;
1007 }
1008
1009 /* This is called when we fold something based on the fact that signed
1010 overflow is undefined. */
1011
1012 static void
1013 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1014 {
1015 gcc_assert (!flag_wrapv && !flag_trapv);
1016 if (fold_deferring_overflow_warnings > 0)
1017 {
1018 if (fold_deferred_overflow_warning == NULL
1019 || wc < fold_deferred_overflow_code)
1020 {
1021 fold_deferred_overflow_warning = gmsgid;
1022 fold_deferred_overflow_code = wc;
1023 }
1024 }
1025 else if (issue_strict_overflow_warning (wc))
1026 warning (OPT_Wstrict_overflow, gmsgid);
1027 }
1028 \f
1029 /* Return true if the built-in mathematical function specified by CODE
1030 is odd, i.e. -f(x) == f(-x). */
1031
1032 static bool
1033 negate_mathfn_p (enum built_in_function code)
1034 {
1035 switch (code)
1036 {
1037 CASE_FLT_FN (BUILT_IN_ASIN):
1038 CASE_FLT_FN (BUILT_IN_ASINH):
1039 CASE_FLT_FN (BUILT_IN_ATAN):
1040 CASE_FLT_FN (BUILT_IN_ATANH):
1041 CASE_FLT_FN (BUILT_IN_CASIN):
1042 CASE_FLT_FN (BUILT_IN_CASINH):
1043 CASE_FLT_FN (BUILT_IN_CATAN):
1044 CASE_FLT_FN (BUILT_IN_CATANH):
1045 CASE_FLT_FN (BUILT_IN_CBRT):
1046 CASE_FLT_FN (BUILT_IN_CPROJ):
1047 CASE_FLT_FN (BUILT_IN_CSIN):
1048 CASE_FLT_FN (BUILT_IN_CSINH):
1049 CASE_FLT_FN (BUILT_IN_CTAN):
1050 CASE_FLT_FN (BUILT_IN_CTANH):
1051 CASE_FLT_FN (BUILT_IN_ERF):
1052 CASE_FLT_FN (BUILT_IN_LLROUND):
1053 CASE_FLT_FN (BUILT_IN_LROUND):
1054 CASE_FLT_FN (BUILT_IN_ROUND):
1055 CASE_FLT_FN (BUILT_IN_SIN):
1056 CASE_FLT_FN (BUILT_IN_SINH):
1057 CASE_FLT_FN (BUILT_IN_TAN):
1058 CASE_FLT_FN (BUILT_IN_TANH):
1059 CASE_FLT_FN (BUILT_IN_TRUNC):
1060 return true;
1061
1062 CASE_FLT_FN (BUILT_IN_LLRINT):
1063 CASE_FLT_FN (BUILT_IN_LRINT):
1064 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1065 CASE_FLT_FN (BUILT_IN_RINT):
1066 return !flag_rounding_math;
1067
1068 default:
1069 break;
1070 }
1071 return false;
1072 }
1073
1074 /* Check whether we may negate an integer constant T without causing
1075 overflow. */
1076
1077 bool
1078 may_negate_without_overflow_p (const_tree t)
1079 {
1080 unsigned HOST_WIDE_INT val;
1081 unsigned int prec;
1082 tree type;
1083
1084 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1085
1086 type = TREE_TYPE (t);
1087 if (TYPE_UNSIGNED (type))
1088 return false;
1089
1090 prec = TYPE_PRECISION (type);
1091 if (prec > HOST_BITS_PER_WIDE_INT)
1092 {
1093 if (TREE_INT_CST_LOW (t) != 0)
1094 return true;
1095 prec -= HOST_BITS_PER_WIDE_INT;
1096 val = TREE_INT_CST_HIGH (t);
1097 }
1098 else
1099 val = TREE_INT_CST_LOW (t);
1100 if (prec < HOST_BITS_PER_WIDE_INT)
1101 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1102 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1103 }
1104
1105 /* Determine whether an expression T can be cheaply negated using
1106 the function negate_expr without introducing undefined overflow. */
1107
1108 static bool
1109 negate_expr_p (tree t)
1110 {
1111 tree type;
1112
1113 if (t == 0)
1114 return false;
1115
1116 type = TREE_TYPE (t);
1117
1118 STRIP_SIGN_NOPS (t);
1119 switch (TREE_CODE (t))
1120 {
1121 case INTEGER_CST:
1122 if (TYPE_OVERFLOW_WRAPS (type))
1123 return true;
1124
1125 /* Check that -CST will not overflow type. */
1126 return may_negate_without_overflow_p (t);
1127 case BIT_NOT_EXPR:
1128 return (INTEGRAL_TYPE_P (type)
1129 && TYPE_OVERFLOW_WRAPS (type));
1130
1131 case FIXED_CST:
1132 case REAL_CST:
1133 case NEGATE_EXPR:
1134 return true;
1135
1136 case COMPLEX_CST:
1137 return negate_expr_p (TREE_REALPART (t))
1138 && negate_expr_p (TREE_IMAGPART (t));
1139
1140 case COMPLEX_EXPR:
1141 return negate_expr_p (TREE_OPERAND (t, 0))
1142 && negate_expr_p (TREE_OPERAND (t, 1));
1143
1144 case CONJ_EXPR:
1145 return negate_expr_p (TREE_OPERAND (t, 0));
1146
1147 case PLUS_EXPR:
1148 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1149 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1150 return false;
1151 /* -(A + B) -> (-B) - A. */
1152 if (negate_expr_p (TREE_OPERAND (t, 1))
1153 && reorder_operands_p (TREE_OPERAND (t, 0),
1154 TREE_OPERAND (t, 1)))
1155 return true;
1156 /* -(A + B) -> (-A) - B. */
1157 return negate_expr_p (TREE_OPERAND (t, 0));
1158
1159 case MINUS_EXPR:
1160 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1161 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1162 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1163 && reorder_operands_p (TREE_OPERAND (t, 0),
1164 TREE_OPERAND (t, 1));
1165
1166 case MULT_EXPR:
1167 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1168 break;
1169
1170 /* Fall through. */
1171
1172 case RDIV_EXPR:
1173 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1174 return negate_expr_p (TREE_OPERAND (t, 1))
1175 || negate_expr_p (TREE_OPERAND (t, 0));
1176 break;
1177
1178 case TRUNC_DIV_EXPR:
1179 case ROUND_DIV_EXPR:
1180 case FLOOR_DIV_EXPR:
1181 case CEIL_DIV_EXPR:
1182 case EXACT_DIV_EXPR:
1183 /* In general we can't negate A / B, because if A is INT_MIN and
1184 B is 1, we may turn this into INT_MIN / -1 which is undefined
1185 and actually traps on some architectures. But if overflow is
1186 undefined, we can negate, because - (INT_MIN / 1) is an
1187 overflow. */
1188 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1189 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1190 break;
1191 return negate_expr_p (TREE_OPERAND (t, 1))
1192 || negate_expr_p (TREE_OPERAND (t, 0));
1193
1194 case NOP_EXPR:
1195 /* Negate -((double)float) as (double)(-float). */
1196 if (TREE_CODE (type) == REAL_TYPE)
1197 {
1198 tree tem = strip_float_extensions (t);
1199 if (tem != t)
1200 return negate_expr_p (tem);
1201 }
1202 break;
1203
1204 case CALL_EXPR:
1205 /* Negate -f(x) as f(-x). */
1206 if (negate_mathfn_p (builtin_mathfn_code (t)))
1207 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1208 break;
1209
1210 case RSHIFT_EXPR:
1211 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1212 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1213 {
1214 tree op1 = TREE_OPERAND (t, 1);
1215 if (TREE_INT_CST_HIGH (op1) == 0
1216 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1217 == TREE_INT_CST_LOW (op1))
1218 return true;
1219 }
1220 break;
1221
1222 default:
1223 break;
1224 }
1225 return false;
1226 }
1227
1228 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1229 simplification is possible.
1230 If negate_expr_p would return true for T, NULL_TREE will never be
1231 returned. */
1232
1233 static tree
1234 fold_negate_expr (tree t)
1235 {
1236 tree type = TREE_TYPE (t);
1237 tree tem;
1238
1239 switch (TREE_CODE (t))
1240 {
1241 /* Convert - (~A) to A + 1. */
1242 case BIT_NOT_EXPR:
1243 if (INTEGRAL_TYPE_P (type))
1244 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1245 build_int_cst (type, 1));
1246 break;
1247
1248 case INTEGER_CST:
1249 tem = fold_negate_const (t, type);
1250 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1251 || !TYPE_OVERFLOW_TRAPS (type))
1252 return tem;
1253 break;
1254
1255 case REAL_CST:
1256 tem = fold_negate_const (t, type);
1257 /* Two's complement FP formats, such as c4x, may overflow. */
1258 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1259 return tem;
1260 break;
1261
1262 case FIXED_CST:
1263 tem = fold_negate_const (t, type);
1264 return tem;
1265
1266 case COMPLEX_CST:
1267 {
1268 tree rpart = negate_expr (TREE_REALPART (t));
1269 tree ipart = negate_expr (TREE_IMAGPART (t));
1270
1271 if ((TREE_CODE (rpart) == REAL_CST
1272 && TREE_CODE (ipart) == REAL_CST)
1273 || (TREE_CODE (rpart) == INTEGER_CST
1274 && TREE_CODE (ipart) == INTEGER_CST))
1275 return build_complex (type, rpart, ipart);
1276 }
1277 break;
1278
1279 case COMPLEX_EXPR:
1280 if (negate_expr_p (t))
1281 return fold_build2 (COMPLEX_EXPR, type,
1282 fold_negate_expr (TREE_OPERAND (t, 0)),
1283 fold_negate_expr (TREE_OPERAND (t, 1)));
1284 break;
1285
1286 case CONJ_EXPR:
1287 if (negate_expr_p (t))
1288 return fold_build1 (CONJ_EXPR, type,
1289 fold_negate_expr (TREE_OPERAND (t, 0)));
1290 break;
1291
1292 case NEGATE_EXPR:
1293 return TREE_OPERAND (t, 0);
1294
1295 case PLUS_EXPR:
1296 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1297 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1298 {
1299 /* -(A + B) -> (-B) - A. */
1300 if (negate_expr_p (TREE_OPERAND (t, 1))
1301 && reorder_operands_p (TREE_OPERAND (t, 0),
1302 TREE_OPERAND (t, 1)))
1303 {
1304 tem = negate_expr (TREE_OPERAND (t, 1));
1305 return fold_build2 (MINUS_EXPR, type,
1306 tem, TREE_OPERAND (t, 0));
1307 }
1308
1309 /* -(A + B) -> (-A) - B. */
1310 if (negate_expr_p (TREE_OPERAND (t, 0)))
1311 {
1312 tem = negate_expr (TREE_OPERAND (t, 0));
1313 return fold_build2 (MINUS_EXPR, type,
1314 tem, TREE_OPERAND (t, 1));
1315 }
1316 }
1317 break;
1318
1319 case MINUS_EXPR:
1320 /* - (A - B) -> B - A */
1321 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1322 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1323 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1324 return fold_build2 (MINUS_EXPR, type,
1325 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1326 break;
1327
1328 case MULT_EXPR:
1329 if (TYPE_UNSIGNED (type))
1330 break;
1331
1332 /* Fall through. */
1333
1334 case RDIV_EXPR:
1335 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1336 {
1337 tem = TREE_OPERAND (t, 1);
1338 if (negate_expr_p (tem))
1339 return fold_build2 (TREE_CODE (t), type,
1340 TREE_OPERAND (t, 0), negate_expr (tem));
1341 tem = TREE_OPERAND (t, 0);
1342 if (negate_expr_p (tem))
1343 return fold_build2 (TREE_CODE (t), type,
1344 negate_expr (tem), TREE_OPERAND (t, 1));
1345 }
1346 break;
1347
1348 case TRUNC_DIV_EXPR:
1349 case ROUND_DIV_EXPR:
1350 case FLOOR_DIV_EXPR:
1351 case CEIL_DIV_EXPR:
1352 case EXACT_DIV_EXPR:
1353 /* In general we can't negate A / B, because if A is INT_MIN and
1354 B is 1, we may turn this into INT_MIN / -1 which is undefined
1355 and actually traps on some architectures. But if overflow is
1356 undefined, we can negate, because - (INT_MIN / 1) is an
1357 overflow. */
1358 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1359 {
1360 const char * const warnmsg = G_("assuming signed overflow does not "
1361 "occur when negating a division");
1362 tem = TREE_OPERAND (t, 1);
1363 if (negate_expr_p (tem))
1364 {
1365 if (INTEGRAL_TYPE_P (type)
1366 && (TREE_CODE (tem) != INTEGER_CST
1367 || integer_onep (tem)))
1368 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1369 return fold_build2 (TREE_CODE (t), type,
1370 TREE_OPERAND (t, 0), negate_expr (tem));
1371 }
1372 tem = TREE_OPERAND (t, 0);
1373 if (negate_expr_p (tem))
1374 {
1375 if (INTEGRAL_TYPE_P (type)
1376 && (TREE_CODE (tem) != INTEGER_CST
1377 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1378 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1379 return fold_build2 (TREE_CODE (t), type,
1380 negate_expr (tem), TREE_OPERAND (t, 1));
1381 }
1382 }
1383 break;
1384
1385 case NOP_EXPR:
1386 /* Convert -((double)float) into (double)(-float). */
1387 if (TREE_CODE (type) == REAL_TYPE)
1388 {
1389 tem = strip_float_extensions (t);
1390 if (tem != t && negate_expr_p (tem))
1391 return fold_convert (type, negate_expr (tem));
1392 }
1393 break;
1394
1395 case CALL_EXPR:
1396 /* Negate -f(x) as f(-x). */
1397 if (negate_mathfn_p (builtin_mathfn_code (t))
1398 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1399 {
1400 tree fndecl, arg;
1401
1402 fndecl = get_callee_fndecl (t);
1403 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1404 return build_call_expr (fndecl, 1, arg);
1405 }
1406 break;
1407
1408 case RSHIFT_EXPR:
1409 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1410 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1411 {
1412 tree op1 = TREE_OPERAND (t, 1);
1413 if (TREE_INT_CST_HIGH (op1) == 0
1414 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1415 == TREE_INT_CST_LOW (op1))
1416 {
1417 tree ntype = TYPE_UNSIGNED (type)
1418 ? signed_type_for (type)
1419 : unsigned_type_for (type);
1420 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1421 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1422 return fold_convert (type, temp);
1423 }
1424 }
1425 break;
1426
1427 default:
1428 break;
1429 }
1430
1431 return NULL_TREE;
1432 }
1433
1434 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1435 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1436 return NULL_TREE. */
1437
1438 static tree
1439 negate_expr (tree t)
1440 {
1441 tree type, tem;
1442
1443 if (t == NULL_TREE)
1444 return NULL_TREE;
1445
1446 type = TREE_TYPE (t);
1447 STRIP_SIGN_NOPS (t);
1448
1449 tem = fold_negate_expr (t);
1450 if (!tem)
1451 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1452 return fold_convert (type, tem);
1453 }
1454 \f
1455 /* Split a tree IN into a constant, literal and variable parts that could be
1456 combined with CODE to make IN. "constant" means an expression with
1457 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1458 commutative arithmetic operation. Store the constant part into *CONP,
1459 the literal in *LITP and return the variable part. If a part isn't
1460 present, set it to null. If the tree does not decompose in this way,
1461 return the entire tree as the variable part and the other parts as null.
1462
1463 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1464 case, we negate an operand that was subtracted. Except if it is a
1465 literal for which we use *MINUS_LITP instead.
1466
1467 If NEGATE_P is true, we are negating all of IN, again except a literal
1468 for which we use *MINUS_LITP instead.
1469
1470 If IN is itself a literal or constant, return it as appropriate.
1471
1472 Note that we do not guarantee that any of the three values will be the
1473 same type as IN, but they will have the same signedness and mode. */
1474
1475 static tree
1476 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1477 tree *minus_litp, int negate_p)
1478 {
1479 tree var = 0;
1480
1481 *conp = 0;
1482 *litp = 0;
1483 *minus_litp = 0;
1484
1485 /* Strip any conversions that don't change the machine mode or signedness. */
1486 STRIP_SIGN_NOPS (in);
1487
1488 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1489 || TREE_CODE (in) == FIXED_CST)
1490 *litp = in;
1491 else if (TREE_CODE (in) == code
1492 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1493 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1494 /* We can associate addition and subtraction together (even
1495 though the C standard doesn't say so) for integers because
1496 the value is not affected. For reals, the value might be
1497 affected, so we can't. */
1498 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1499 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1500 {
1501 tree op0 = TREE_OPERAND (in, 0);
1502 tree op1 = TREE_OPERAND (in, 1);
1503 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1504 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1505
1506 /* First see if either of the operands is a literal, then a constant. */
1507 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1508 || TREE_CODE (op0) == FIXED_CST)
1509 *litp = op0, op0 = 0;
1510 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1511 || TREE_CODE (op1) == FIXED_CST)
1512 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1513
1514 if (op0 != 0 && TREE_CONSTANT (op0))
1515 *conp = op0, op0 = 0;
1516 else if (op1 != 0 && TREE_CONSTANT (op1))
1517 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1518
1519 /* If we haven't dealt with either operand, this is not a case we can
1520 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1521 if (op0 != 0 && op1 != 0)
1522 var = in;
1523 else if (op0 != 0)
1524 var = op0;
1525 else
1526 var = op1, neg_var_p = neg1_p;
1527
1528 /* Now do any needed negations. */
1529 if (neg_litp_p)
1530 *minus_litp = *litp, *litp = 0;
1531 if (neg_conp_p)
1532 *conp = negate_expr (*conp);
1533 if (neg_var_p)
1534 var = negate_expr (var);
1535 }
1536 else if (TREE_CONSTANT (in))
1537 *conp = in;
1538 else
1539 var = in;
1540
1541 if (negate_p)
1542 {
1543 if (*litp)
1544 *minus_litp = *litp, *litp = 0;
1545 else if (*minus_litp)
1546 *litp = *minus_litp, *minus_litp = 0;
1547 *conp = negate_expr (*conp);
1548 var = negate_expr (var);
1549 }
1550
1551 return var;
1552 }
1553
1554 /* Re-associate trees split by the above function. T1 and T2 are either
1555 expressions to associate or null. Return the new expression, if any. If
1556 we build an operation, do it in TYPE and with CODE. */
1557
1558 static tree
1559 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1560 {
1561 if (t1 == 0)
1562 return t2;
1563 else if (t2 == 0)
1564 return t1;
1565
1566 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1567 try to fold this since we will have infinite recursion. But do
1568 deal with any NEGATE_EXPRs. */
1569 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1570 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1571 {
1572 if (code == PLUS_EXPR)
1573 {
1574 if (TREE_CODE (t1) == NEGATE_EXPR)
1575 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1576 fold_convert (type, TREE_OPERAND (t1, 0)));
1577 else if (TREE_CODE (t2) == NEGATE_EXPR)
1578 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1579 fold_convert (type, TREE_OPERAND (t2, 0)));
1580 else if (integer_zerop (t2))
1581 return fold_convert (type, t1);
1582 }
1583 else if (code == MINUS_EXPR)
1584 {
1585 if (integer_zerop (t2))
1586 return fold_convert (type, t1);
1587 }
1588
1589 return build2 (code, type, fold_convert (type, t1),
1590 fold_convert (type, t2));
1591 }
1592
1593 return fold_build2 (code, type, fold_convert (type, t1),
1594 fold_convert (type, t2));
1595 }
1596 \f
1597 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1598 for use in int_const_binop, size_binop and size_diffop. */
1599
1600 static bool
1601 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
1602 {
1603 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1604 return false;
1605 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1606 return false;
1607
1608 switch (code)
1609 {
1610 case LSHIFT_EXPR:
1611 case RSHIFT_EXPR:
1612 case LROTATE_EXPR:
1613 case RROTATE_EXPR:
1614 return true;
1615
1616 default:
1617 break;
1618 }
1619
1620 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1621 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1622 && TYPE_MODE (type1) == TYPE_MODE (type2);
1623 }
1624
1625
1626 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1627 to produce a new constant. Return NULL_TREE if we don't know how
1628 to evaluate CODE at compile-time.
1629
1630 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1631
1632 tree
1633 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1634 {
1635 unsigned HOST_WIDE_INT int1l, int2l;
1636 HOST_WIDE_INT int1h, int2h;
1637 unsigned HOST_WIDE_INT low;
1638 HOST_WIDE_INT hi;
1639 unsigned HOST_WIDE_INT garbagel;
1640 HOST_WIDE_INT garbageh;
1641 tree t;
1642 tree type = TREE_TYPE (arg1);
1643 int uns = TYPE_UNSIGNED (type);
1644 int is_sizetype
1645 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1646 int overflow = 0;
1647
1648 int1l = TREE_INT_CST_LOW (arg1);
1649 int1h = TREE_INT_CST_HIGH (arg1);
1650 int2l = TREE_INT_CST_LOW (arg2);
1651 int2h = TREE_INT_CST_HIGH (arg2);
1652
1653 switch (code)
1654 {
1655 case BIT_IOR_EXPR:
1656 low = int1l | int2l, hi = int1h | int2h;
1657 break;
1658
1659 case BIT_XOR_EXPR:
1660 low = int1l ^ int2l, hi = int1h ^ int2h;
1661 break;
1662
1663 case BIT_AND_EXPR:
1664 low = int1l & int2l, hi = int1h & int2h;
1665 break;
1666
1667 case RSHIFT_EXPR:
1668 int2l = -int2l;
1669 case LSHIFT_EXPR:
1670 /* It's unclear from the C standard whether shifts can overflow.
1671 The following code ignores overflow; perhaps a C standard
1672 interpretation ruling is needed. */
1673 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1674 &low, &hi, !uns);
1675 break;
1676
1677 case RROTATE_EXPR:
1678 int2l = - int2l;
1679 case LROTATE_EXPR:
1680 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1681 &low, &hi);
1682 break;
1683
1684 case PLUS_EXPR:
1685 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1686 break;
1687
1688 case MINUS_EXPR:
1689 neg_double (int2l, int2h, &low, &hi);
1690 add_double (int1l, int1h, low, hi, &low, &hi);
1691 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1692 break;
1693
1694 case MULT_EXPR:
1695 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1696 break;
1697
1698 case TRUNC_DIV_EXPR:
1699 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1700 case EXACT_DIV_EXPR:
1701 /* This is a shortcut for a common special case. */
1702 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1703 && !TREE_OVERFLOW (arg1)
1704 && !TREE_OVERFLOW (arg2)
1705 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1706 {
1707 if (code == CEIL_DIV_EXPR)
1708 int1l += int2l - 1;
1709
1710 low = int1l / int2l, hi = 0;
1711 break;
1712 }
1713
1714 /* ... fall through ... */
1715
1716 case ROUND_DIV_EXPR:
1717 if (int2h == 0 && int2l == 0)
1718 return NULL_TREE;
1719 if (int2h == 0 && int2l == 1)
1720 {
1721 low = int1l, hi = int1h;
1722 break;
1723 }
1724 if (int1l == int2l && int1h == int2h
1725 && ! (int1l == 0 && int1h == 0))
1726 {
1727 low = 1, hi = 0;
1728 break;
1729 }
1730 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1731 &low, &hi, &garbagel, &garbageh);
1732 break;
1733
1734 case TRUNC_MOD_EXPR:
1735 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1736 /* This is a shortcut for a common special case. */
1737 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1738 && !TREE_OVERFLOW (arg1)
1739 && !TREE_OVERFLOW (arg2)
1740 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1741 {
1742 if (code == CEIL_MOD_EXPR)
1743 int1l += int2l - 1;
1744 low = int1l % int2l, hi = 0;
1745 break;
1746 }
1747
1748 /* ... fall through ... */
1749
1750 case ROUND_MOD_EXPR:
1751 if (int2h == 0 && int2l == 0)
1752 return NULL_TREE;
1753 overflow = div_and_round_double (code, uns,
1754 int1l, int1h, int2l, int2h,
1755 &garbagel, &garbageh, &low, &hi);
1756 break;
1757
1758 case MIN_EXPR:
1759 case MAX_EXPR:
1760 if (uns)
1761 low = (((unsigned HOST_WIDE_INT) int1h
1762 < (unsigned HOST_WIDE_INT) int2h)
1763 || (((unsigned HOST_WIDE_INT) int1h
1764 == (unsigned HOST_WIDE_INT) int2h)
1765 && int1l < int2l));
1766 else
1767 low = (int1h < int2h
1768 || (int1h == int2h && int1l < int2l));
1769
1770 if (low == (code == MIN_EXPR))
1771 low = int1l, hi = int1h;
1772 else
1773 low = int2l, hi = int2h;
1774 break;
1775
1776 default:
1777 return NULL_TREE;
1778 }
1779
1780 if (notrunc)
1781 {
1782 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1783
1784 /* Propagate overflow flags ourselves. */
1785 if (((!uns || is_sizetype) && overflow)
1786 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1787 {
1788 t = copy_node (t);
1789 TREE_OVERFLOW (t) = 1;
1790 }
1791 }
1792 else
1793 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1794 ((!uns || is_sizetype) && overflow)
1795 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1796
1797 return t;
1798 }
1799
1800 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1801 constant. We assume ARG1 and ARG2 have the same data type, or at least
1802 are the same kind of constant and the same machine mode. Return zero if
1803 combining the constants is not allowed in the current operating mode.
1804
1805 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1806
1807 static tree
1808 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1809 {
1810 /* Sanity check for the recursive cases. */
1811 if (!arg1 || !arg2)
1812 return NULL_TREE;
1813
1814 STRIP_NOPS (arg1);
1815 STRIP_NOPS (arg2);
1816
1817 if (TREE_CODE (arg1) == INTEGER_CST)
1818 return int_const_binop (code, arg1, arg2, notrunc);
1819
1820 if (TREE_CODE (arg1) == REAL_CST)
1821 {
1822 enum machine_mode mode;
1823 REAL_VALUE_TYPE d1;
1824 REAL_VALUE_TYPE d2;
1825 REAL_VALUE_TYPE value;
1826 REAL_VALUE_TYPE result;
1827 bool inexact;
1828 tree t, type;
1829
1830 /* The following codes are handled by real_arithmetic. */
1831 switch (code)
1832 {
1833 case PLUS_EXPR:
1834 case MINUS_EXPR:
1835 case MULT_EXPR:
1836 case RDIV_EXPR:
1837 case MIN_EXPR:
1838 case MAX_EXPR:
1839 break;
1840
1841 default:
1842 return NULL_TREE;
1843 }
1844
1845 d1 = TREE_REAL_CST (arg1);
1846 d2 = TREE_REAL_CST (arg2);
1847
1848 type = TREE_TYPE (arg1);
1849 mode = TYPE_MODE (type);
1850
1851 /* Don't perform operation if we honor signaling NaNs and
1852 either operand is a NaN. */
1853 if (HONOR_SNANS (mode)
1854 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1855 return NULL_TREE;
1856
1857 /* Don't perform operation if it would raise a division
1858 by zero exception. */
1859 if (code == RDIV_EXPR
1860 && REAL_VALUES_EQUAL (d2, dconst0)
1861 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1862 return NULL_TREE;
1863
1864 /* If either operand is a NaN, just return it. Otherwise, set up
1865 for floating-point trap; we return an overflow. */
1866 if (REAL_VALUE_ISNAN (d1))
1867 return arg1;
1868 else if (REAL_VALUE_ISNAN (d2))
1869 return arg2;
1870
1871 inexact = real_arithmetic (&value, code, &d1, &d2);
1872 real_convert (&result, mode, &value);
1873
1874 /* Don't constant fold this floating point operation if
1875 the result has overflowed and flag_trapping_math. */
1876 if (flag_trapping_math
1877 && MODE_HAS_INFINITIES (mode)
1878 && REAL_VALUE_ISINF (result)
1879 && !REAL_VALUE_ISINF (d1)
1880 && !REAL_VALUE_ISINF (d2))
1881 return NULL_TREE;
1882
1883 /* Don't constant fold this floating point operation if the
1884 result may dependent upon the run-time rounding mode and
1885 flag_rounding_math is set, or if GCC's software emulation
1886 is unable to accurately represent the result. */
1887 if ((flag_rounding_math
1888 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1889 && !flag_unsafe_math_optimizations))
1890 && (inexact || !real_identical (&result, &value)))
1891 return NULL_TREE;
1892
1893 t = build_real (type, result);
1894
1895 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1896 return t;
1897 }
1898
1899 if (TREE_CODE (arg1) == FIXED_CST)
1900 {
1901 FIXED_VALUE_TYPE f1;
1902 FIXED_VALUE_TYPE f2;
1903 FIXED_VALUE_TYPE result;
1904 tree t, type;
1905 int sat_p;
1906 bool overflow_p;
1907
1908 /* The following codes are handled by fixed_arithmetic. */
1909 switch (code)
1910 {
1911 case PLUS_EXPR:
1912 case MINUS_EXPR:
1913 case MULT_EXPR:
1914 case TRUNC_DIV_EXPR:
1915 f2 = TREE_FIXED_CST (arg2);
1916 break;
1917
1918 case LSHIFT_EXPR:
1919 case RSHIFT_EXPR:
1920 f2.data.high = TREE_INT_CST_HIGH (arg2);
1921 f2.data.low = TREE_INT_CST_LOW (arg2);
1922 f2.mode = SImode;
1923 break;
1924
1925 default:
1926 return NULL_TREE;
1927 }
1928
1929 f1 = TREE_FIXED_CST (arg1);
1930 type = TREE_TYPE (arg1);
1931 sat_p = TYPE_SATURATING (type);
1932 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1933 t = build_fixed (type, result);
1934 /* Propagate overflow flags. */
1935 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1936 {
1937 TREE_OVERFLOW (t) = 1;
1938 TREE_CONSTANT_OVERFLOW (t) = 1;
1939 }
1940 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1941 TREE_CONSTANT_OVERFLOW (t) = 1;
1942 return t;
1943 }
1944
1945 if (TREE_CODE (arg1) == COMPLEX_CST)
1946 {
1947 tree type = TREE_TYPE (arg1);
1948 tree r1 = TREE_REALPART (arg1);
1949 tree i1 = TREE_IMAGPART (arg1);
1950 tree r2 = TREE_REALPART (arg2);
1951 tree i2 = TREE_IMAGPART (arg2);
1952 tree real, imag;
1953
1954 switch (code)
1955 {
1956 case PLUS_EXPR:
1957 case MINUS_EXPR:
1958 real = const_binop (code, r1, r2, notrunc);
1959 imag = const_binop (code, i1, i2, notrunc);
1960 break;
1961
1962 case MULT_EXPR:
1963 real = const_binop (MINUS_EXPR,
1964 const_binop (MULT_EXPR, r1, r2, notrunc),
1965 const_binop (MULT_EXPR, i1, i2, notrunc),
1966 notrunc);
1967 imag = const_binop (PLUS_EXPR,
1968 const_binop (MULT_EXPR, r1, i2, notrunc),
1969 const_binop (MULT_EXPR, i1, r2, notrunc),
1970 notrunc);
1971 break;
1972
1973 case RDIV_EXPR:
1974 {
1975 tree magsquared
1976 = const_binop (PLUS_EXPR,
1977 const_binop (MULT_EXPR, r2, r2, notrunc),
1978 const_binop (MULT_EXPR, i2, i2, notrunc),
1979 notrunc);
1980 tree t1
1981 = const_binop (PLUS_EXPR,
1982 const_binop (MULT_EXPR, r1, r2, notrunc),
1983 const_binop (MULT_EXPR, i1, i2, notrunc),
1984 notrunc);
1985 tree t2
1986 = const_binop (MINUS_EXPR,
1987 const_binop (MULT_EXPR, i1, r2, notrunc),
1988 const_binop (MULT_EXPR, r1, i2, notrunc),
1989 notrunc);
1990
1991 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1992 code = TRUNC_DIV_EXPR;
1993
1994 real = const_binop (code, t1, magsquared, notrunc);
1995 imag = const_binop (code, t2, magsquared, notrunc);
1996 }
1997 break;
1998
1999 default:
2000 return NULL_TREE;
2001 }
2002
2003 if (real && imag)
2004 return build_complex (type, real, imag);
2005 }
2006
2007 return NULL_TREE;
2008 }
2009
2010 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2011 indicates which particular sizetype to create. */
2012
2013 tree
2014 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2015 {
2016 return build_int_cst (sizetype_tab[(int) kind], number);
2017 }
2018 \f
2019 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2020 is a tree code. The type of the result is taken from the operands.
2021 Both must be equivalent integer types, ala int_binop_types_match_p.
2022 If the operands are constant, so is the result. */
2023
2024 tree
2025 size_binop (enum tree_code code, tree arg0, tree arg1)
2026 {
2027 tree type = TREE_TYPE (arg0);
2028
2029 if (arg0 == error_mark_node || arg1 == error_mark_node)
2030 return error_mark_node;
2031
2032 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2033 TREE_TYPE (arg1)));
2034
2035 /* Handle the special case of two integer constants faster. */
2036 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2037 {
2038 /* And some specific cases even faster than that. */
2039 if (code == PLUS_EXPR)
2040 {
2041 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2042 return arg1;
2043 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2044 return arg0;
2045 }
2046 else if (code == MINUS_EXPR)
2047 {
2048 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2049 return arg0;
2050 }
2051 else if (code == MULT_EXPR)
2052 {
2053 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2054 return arg1;
2055 }
2056
2057 /* Handle general case of two integer constants. */
2058 return int_const_binop (code, arg0, arg1, 0);
2059 }
2060
2061 return fold_build2 (code, type, arg0, arg1);
2062 }
2063
2064 /* Given two values, either both of sizetype or both of bitsizetype,
2065 compute the difference between the two values. Return the value
2066 in signed type corresponding to the type of the operands. */
2067
2068 tree
2069 size_diffop (tree arg0, tree arg1)
2070 {
2071 tree type = TREE_TYPE (arg0);
2072 tree ctype;
2073
2074 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2075 TREE_TYPE (arg1)));
2076
2077 /* If the type is already signed, just do the simple thing. */
2078 if (!TYPE_UNSIGNED (type))
2079 return size_binop (MINUS_EXPR, arg0, arg1);
2080
2081 if (type == sizetype)
2082 ctype = ssizetype;
2083 else if (type == bitsizetype)
2084 ctype = sbitsizetype;
2085 else
2086 ctype = signed_type_for (type);
2087
2088 /* If either operand is not a constant, do the conversions to the signed
2089 type and subtract. The hardware will do the right thing with any
2090 overflow in the subtraction. */
2091 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2092 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2093 fold_convert (ctype, arg1));
2094
2095 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2096 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2097 overflow) and negate (which can't either). Special-case a result
2098 of zero while we're here. */
2099 if (tree_int_cst_equal (arg0, arg1))
2100 return build_int_cst (ctype, 0);
2101 else if (tree_int_cst_lt (arg1, arg0))
2102 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2103 else
2104 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2105 fold_convert (ctype, size_binop (MINUS_EXPR,
2106 arg1, arg0)));
2107 }
2108 \f
2109 /* A subroutine of fold_convert_const handling conversions of an
2110 INTEGER_CST to another integer type. */
2111
2112 static tree
2113 fold_convert_const_int_from_int (tree type, tree arg1)
2114 {
2115 tree t;
2116
2117 /* Given an integer constant, make new constant with new type,
2118 appropriately sign-extended or truncated. */
2119 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2120 TREE_INT_CST_HIGH (arg1),
2121 /* Don't set the overflow when
2122 converting a pointer */
2123 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2124 (TREE_INT_CST_HIGH (arg1) < 0
2125 && (TYPE_UNSIGNED (type)
2126 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2127 | TREE_OVERFLOW (arg1));
2128
2129 return t;
2130 }
2131
2132 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2133 to an integer type. */
2134
2135 static tree
2136 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
2137 {
2138 int overflow = 0;
2139 tree t;
2140
2141 /* The following code implements the floating point to integer
2142 conversion rules required by the Java Language Specification,
2143 that IEEE NaNs are mapped to zero and values that overflow
2144 the target precision saturate, i.e. values greater than
2145 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2146 are mapped to INT_MIN. These semantics are allowed by the
2147 C and C++ standards that simply state that the behavior of
2148 FP-to-integer conversion is unspecified upon overflow. */
2149
2150 HOST_WIDE_INT high, low;
2151 REAL_VALUE_TYPE r;
2152 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2153
2154 switch (code)
2155 {
2156 case FIX_TRUNC_EXPR:
2157 real_trunc (&r, VOIDmode, &x);
2158 break;
2159
2160 default:
2161 gcc_unreachable ();
2162 }
2163
2164 /* If R is NaN, return zero and show we have an overflow. */
2165 if (REAL_VALUE_ISNAN (r))
2166 {
2167 overflow = 1;
2168 high = 0;
2169 low = 0;
2170 }
2171
2172 /* See if R is less than the lower bound or greater than the
2173 upper bound. */
2174
2175 if (! overflow)
2176 {
2177 tree lt = TYPE_MIN_VALUE (type);
2178 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2179 if (REAL_VALUES_LESS (r, l))
2180 {
2181 overflow = 1;
2182 high = TREE_INT_CST_HIGH (lt);
2183 low = TREE_INT_CST_LOW (lt);
2184 }
2185 }
2186
2187 if (! overflow)
2188 {
2189 tree ut = TYPE_MAX_VALUE (type);
2190 if (ut)
2191 {
2192 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2193 if (REAL_VALUES_LESS (u, r))
2194 {
2195 overflow = 1;
2196 high = TREE_INT_CST_HIGH (ut);
2197 low = TREE_INT_CST_LOW (ut);
2198 }
2199 }
2200 }
2201
2202 if (! overflow)
2203 REAL_VALUE_TO_INT (&low, &high, r);
2204
2205 t = force_fit_type_double (type, low, high, -1,
2206 overflow | TREE_OVERFLOW (arg1));
2207 return t;
2208 }
2209
2210 /* A subroutine of fold_convert_const handling conversions of a
2211 FIXED_CST to an integer type. */
2212
2213 static tree
2214 fold_convert_const_int_from_fixed (tree type, tree arg1)
2215 {
2216 tree t;
2217 double_int temp, temp_trunc;
2218 unsigned int mode;
2219
2220 /* Right shift FIXED_CST to temp by fbit. */
2221 temp = TREE_FIXED_CST (arg1).data;
2222 mode = TREE_FIXED_CST (arg1).mode;
2223 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2224 {
2225 lshift_double (temp.low, temp.high,
2226 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2227 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2228
2229 /* Left shift temp to temp_trunc by fbit. */
2230 lshift_double (temp.low, temp.high,
2231 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2232 &temp_trunc.low, &temp_trunc.high,
2233 SIGNED_FIXED_POINT_MODE_P (mode));
2234 }
2235 else
2236 {
2237 temp.low = 0;
2238 temp.high = 0;
2239 temp_trunc.low = 0;
2240 temp_trunc.high = 0;
2241 }
2242
2243 /* If FIXED_CST is negative, we need to round the value toward 0.
2244 By checking if the fractional bits are not zero to add 1 to temp. */
2245 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2246 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2247 {
2248 double_int one;
2249 one.low = 1;
2250 one.high = 0;
2251 temp = double_int_add (temp, one);
2252 }
2253
2254 /* Given a fixed-point constant, make new constant with new type,
2255 appropriately sign-extended or truncated. */
2256 t = force_fit_type_double (type, temp.low, temp.high, -1,
2257 (temp.high < 0
2258 && (TYPE_UNSIGNED (type)
2259 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2260 | TREE_OVERFLOW (arg1));
2261
2262 return t;
2263 }
2264
2265 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2266 to another floating point type. */
2267
2268 static tree
2269 fold_convert_const_real_from_real (tree type, tree arg1)
2270 {
2271 REAL_VALUE_TYPE value;
2272 tree t;
2273
2274 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2275 t = build_real (type, value);
2276
2277 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2278 return t;
2279 }
2280
2281 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2282 to a floating point type. */
2283
2284 static tree
2285 fold_convert_const_real_from_fixed (tree type, tree arg1)
2286 {
2287 REAL_VALUE_TYPE value;
2288 tree t;
2289
2290 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2291 t = build_real (type, value);
2292
2293 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2294 TREE_CONSTANT_OVERFLOW (t)
2295 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2296 return t;
2297 }
2298
2299 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2300 to another fixed-point type. */
2301
2302 static tree
2303 fold_convert_const_fixed_from_fixed (tree type, tree arg1)
2304 {
2305 FIXED_VALUE_TYPE value;
2306 tree t;
2307 bool overflow_p;
2308
2309 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2310 TYPE_SATURATING (type));
2311 t = build_fixed (type, value);
2312
2313 /* Propagate overflow flags. */
2314 if (overflow_p | TREE_OVERFLOW (arg1))
2315 {
2316 TREE_OVERFLOW (t) = 1;
2317 TREE_CONSTANT_OVERFLOW (t) = 1;
2318 }
2319 else if (TREE_CONSTANT_OVERFLOW (arg1))
2320 TREE_CONSTANT_OVERFLOW (t) = 1;
2321 return t;
2322 }
2323
2324 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2325 to a fixed-point type. */
2326
2327 static tree
2328 fold_convert_const_fixed_from_int (tree type, tree arg1)
2329 {
2330 FIXED_VALUE_TYPE value;
2331 tree t;
2332 bool overflow_p;
2333
2334 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2335 TREE_INT_CST (arg1),
2336 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2337 TYPE_SATURATING (type));
2338 t = build_fixed (type, value);
2339
2340 /* Propagate overflow flags. */
2341 if (overflow_p | TREE_OVERFLOW (arg1))
2342 {
2343 TREE_OVERFLOW (t) = 1;
2344 TREE_CONSTANT_OVERFLOW (t) = 1;
2345 }
2346 else if (TREE_CONSTANT_OVERFLOW (arg1))
2347 TREE_CONSTANT_OVERFLOW (t) = 1;
2348 return t;
2349 }
2350
2351 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2352 to a fixed-point type. */
2353
2354 static tree
2355 fold_convert_const_fixed_from_real (tree type, tree arg1)
2356 {
2357 FIXED_VALUE_TYPE value;
2358 tree t;
2359 bool overflow_p;
2360
2361 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2362 &TREE_REAL_CST (arg1),
2363 TYPE_SATURATING (type));
2364 t = build_fixed (type, value);
2365
2366 /* Propagate overflow flags. */
2367 if (overflow_p | TREE_OVERFLOW (arg1))
2368 {
2369 TREE_OVERFLOW (t) = 1;
2370 TREE_CONSTANT_OVERFLOW (t) = 1;
2371 }
2372 else if (TREE_CONSTANT_OVERFLOW (arg1))
2373 TREE_CONSTANT_OVERFLOW (t) = 1;
2374 return t;
2375 }
2376
2377 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2378 type TYPE. If no simplification can be done return NULL_TREE. */
2379
2380 static tree
2381 fold_convert_const (enum tree_code code, tree type, tree arg1)
2382 {
2383 if (TREE_TYPE (arg1) == type)
2384 return arg1;
2385
2386 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2387 {
2388 if (TREE_CODE (arg1) == INTEGER_CST)
2389 return fold_convert_const_int_from_int (type, arg1);
2390 else if (TREE_CODE (arg1) == REAL_CST)
2391 return fold_convert_const_int_from_real (code, type, arg1);
2392 else if (TREE_CODE (arg1) == FIXED_CST)
2393 return fold_convert_const_int_from_fixed (type, arg1);
2394 }
2395 else if (TREE_CODE (type) == REAL_TYPE)
2396 {
2397 if (TREE_CODE (arg1) == INTEGER_CST)
2398 return build_real_from_int_cst (type, arg1);
2399 else if (TREE_CODE (arg1) == REAL_CST)
2400 return fold_convert_const_real_from_real (type, arg1);
2401 else if (TREE_CODE (arg1) == FIXED_CST)
2402 return fold_convert_const_real_from_fixed (type, arg1);
2403 }
2404 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2405 {
2406 if (TREE_CODE (arg1) == FIXED_CST)
2407 return fold_convert_const_fixed_from_fixed (type, arg1);
2408 else if (TREE_CODE (arg1) == INTEGER_CST)
2409 return fold_convert_const_fixed_from_int (type, arg1);
2410 else if (TREE_CODE (arg1) == REAL_CST)
2411 return fold_convert_const_fixed_from_real (type, arg1);
2412 }
2413 return NULL_TREE;
2414 }
2415
2416 /* Construct a vector of zero elements of vector type TYPE. */
2417
2418 static tree
2419 build_zero_vector (tree type)
2420 {
2421 tree elem, list;
2422 int i, units;
2423
2424 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2425 units = TYPE_VECTOR_SUBPARTS (type);
2426
2427 list = NULL_TREE;
2428 for (i = 0; i < units; i++)
2429 list = tree_cons (NULL_TREE, elem, list);
2430 return build_vector (type, list);
2431 }
2432
2433 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2434
2435 bool
2436 fold_convertible_p (const_tree type, const_tree arg)
2437 {
2438 tree orig = TREE_TYPE (arg);
2439
2440 if (type == orig)
2441 return true;
2442
2443 if (TREE_CODE (arg) == ERROR_MARK
2444 || TREE_CODE (type) == ERROR_MARK
2445 || TREE_CODE (orig) == ERROR_MARK)
2446 return false;
2447
2448 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2449 return true;
2450
2451 switch (TREE_CODE (type))
2452 {
2453 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2454 case POINTER_TYPE: case REFERENCE_TYPE:
2455 case OFFSET_TYPE:
2456 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2457 || TREE_CODE (orig) == OFFSET_TYPE)
2458 return true;
2459 return (TREE_CODE (orig) == VECTOR_TYPE
2460 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2461
2462 default:
2463 return TREE_CODE (type) == TREE_CODE (orig);
2464 }
2465 }
2466
2467 /* Convert expression ARG to type TYPE. Used by the middle-end for
2468 simple conversions in preference to calling the front-end's convert. */
2469
2470 tree
2471 fold_convert (tree type, tree arg)
2472 {
2473 tree orig = TREE_TYPE (arg);
2474 tree tem;
2475
2476 if (type == orig)
2477 return arg;
2478
2479 if (TREE_CODE (arg) == ERROR_MARK
2480 || TREE_CODE (type) == ERROR_MARK
2481 || TREE_CODE (orig) == ERROR_MARK)
2482 return error_mark_node;
2483
2484 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2485 return fold_build1 (NOP_EXPR, type, arg);
2486
2487 switch (TREE_CODE (type))
2488 {
2489 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2490 case POINTER_TYPE: case REFERENCE_TYPE:
2491 case OFFSET_TYPE:
2492 if (TREE_CODE (arg) == INTEGER_CST)
2493 {
2494 tem = fold_convert_const (NOP_EXPR, type, arg);
2495 if (tem != NULL_TREE)
2496 return tem;
2497 }
2498 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2499 || TREE_CODE (orig) == OFFSET_TYPE)
2500 return fold_build1 (NOP_EXPR, type, arg);
2501 if (TREE_CODE (orig) == COMPLEX_TYPE)
2502 {
2503 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2504 return fold_convert (type, tem);
2505 }
2506 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2507 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2508 return fold_build1 (NOP_EXPR, type, arg);
2509
2510 case REAL_TYPE:
2511 if (TREE_CODE (arg) == INTEGER_CST)
2512 {
2513 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2514 if (tem != NULL_TREE)
2515 return tem;
2516 }
2517 else if (TREE_CODE (arg) == REAL_CST)
2518 {
2519 tem = fold_convert_const (NOP_EXPR, type, arg);
2520 if (tem != NULL_TREE)
2521 return tem;
2522 }
2523 else if (TREE_CODE (arg) == FIXED_CST)
2524 {
2525 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2526 if (tem != NULL_TREE)
2527 return tem;
2528 }
2529
2530 switch (TREE_CODE (orig))
2531 {
2532 case INTEGER_TYPE:
2533 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2534 case POINTER_TYPE: case REFERENCE_TYPE:
2535 return fold_build1 (FLOAT_EXPR, type, arg);
2536
2537 case REAL_TYPE:
2538 return fold_build1 (NOP_EXPR, type, arg);
2539
2540 case FIXED_POINT_TYPE:
2541 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2542
2543 case COMPLEX_TYPE:
2544 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2545 return fold_convert (type, tem);
2546
2547 default:
2548 gcc_unreachable ();
2549 }
2550
2551 case FIXED_POINT_TYPE:
2552 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2553 || TREE_CODE (arg) == REAL_CST)
2554 {
2555 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2556 if (tem != NULL_TREE)
2557 return tem;
2558 }
2559
2560 switch (TREE_CODE (orig))
2561 {
2562 case FIXED_POINT_TYPE:
2563 case INTEGER_TYPE:
2564 case ENUMERAL_TYPE:
2565 case BOOLEAN_TYPE:
2566 case REAL_TYPE:
2567 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2568
2569 case COMPLEX_TYPE:
2570 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2571 return fold_convert (type, tem);
2572
2573 default:
2574 gcc_unreachable ();
2575 }
2576
2577 case COMPLEX_TYPE:
2578 switch (TREE_CODE (orig))
2579 {
2580 case INTEGER_TYPE:
2581 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2582 case POINTER_TYPE: case REFERENCE_TYPE:
2583 case REAL_TYPE:
2584 case FIXED_POINT_TYPE:
2585 return build2 (COMPLEX_EXPR, type,
2586 fold_convert (TREE_TYPE (type), arg),
2587 fold_convert (TREE_TYPE (type), integer_zero_node));
2588 case COMPLEX_TYPE:
2589 {
2590 tree rpart, ipart;
2591
2592 if (TREE_CODE (arg) == COMPLEX_EXPR)
2593 {
2594 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2595 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2596 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2597 }
2598
2599 arg = save_expr (arg);
2600 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2601 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2602 rpart = fold_convert (TREE_TYPE (type), rpart);
2603 ipart = fold_convert (TREE_TYPE (type), ipart);
2604 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2605 }
2606
2607 default:
2608 gcc_unreachable ();
2609 }
2610
2611 case VECTOR_TYPE:
2612 if (integer_zerop (arg))
2613 return build_zero_vector (type);
2614 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2615 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2616 || TREE_CODE (orig) == VECTOR_TYPE);
2617 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2618
2619 case VOID_TYPE:
2620 tem = fold_ignored_result (arg);
2621 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2622 return tem;
2623 return fold_build1 (NOP_EXPR, type, tem);
2624
2625 default:
2626 gcc_unreachable ();
2627 }
2628 }
2629 \f
2630 /* Return false if expr can be assumed not to be an lvalue, true
2631 otherwise. */
2632
2633 static bool
2634 maybe_lvalue_p (tree x)
2635 {
2636 /* We only need to wrap lvalue tree codes. */
2637 switch (TREE_CODE (x))
2638 {
2639 case VAR_DECL:
2640 case PARM_DECL:
2641 case RESULT_DECL:
2642 case LABEL_DECL:
2643 case FUNCTION_DECL:
2644 case SSA_NAME:
2645
2646 case COMPONENT_REF:
2647 case INDIRECT_REF:
2648 case ALIGN_INDIRECT_REF:
2649 case MISALIGNED_INDIRECT_REF:
2650 case ARRAY_REF:
2651 case ARRAY_RANGE_REF:
2652 case BIT_FIELD_REF:
2653 case OBJ_TYPE_REF:
2654
2655 case REALPART_EXPR:
2656 case IMAGPART_EXPR:
2657 case PREINCREMENT_EXPR:
2658 case PREDECREMENT_EXPR:
2659 case SAVE_EXPR:
2660 case TRY_CATCH_EXPR:
2661 case WITH_CLEANUP_EXPR:
2662 case COMPOUND_EXPR:
2663 case MODIFY_EXPR:
2664 case GIMPLE_MODIFY_STMT:
2665 case TARGET_EXPR:
2666 case COND_EXPR:
2667 case BIND_EXPR:
2668 case MIN_EXPR:
2669 case MAX_EXPR:
2670 break;
2671
2672 default:
2673 /* Assume the worst for front-end tree codes. */
2674 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2675 break;
2676 return false;
2677 }
2678
2679 return true;
2680 }
2681
2682 /* Return an expr equal to X but certainly not valid as an lvalue. */
2683
2684 tree
2685 non_lvalue (tree x)
2686 {
2687 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2688 us. */
2689 if (in_gimple_form)
2690 return x;
2691
2692 if (! maybe_lvalue_p (x))
2693 return x;
2694 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2695 }
2696
2697 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2698 Zero means allow extended lvalues. */
2699
2700 int pedantic_lvalues;
2701
2702 /* When pedantic, return an expr equal to X but certainly not valid as a
2703 pedantic lvalue. Otherwise, return X. */
2704
2705 static tree
2706 pedantic_non_lvalue (tree x)
2707 {
2708 if (pedantic_lvalues)
2709 return non_lvalue (x);
2710 else
2711 return x;
2712 }
2713 \f
2714 /* Given a tree comparison code, return the code that is the logical inverse
2715 of the given code. It is not safe to do this for floating-point
2716 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2717 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2718
2719 enum tree_code
2720 invert_tree_comparison (enum tree_code code, bool honor_nans)
2721 {
2722 if (honor_nans && flag_trapping_math)
2723 return ERROR_MARK;
2724
2725 switch (code)
2726 {
2727 case EQ_EXPR:
2728 return NE_EXPR;
2729 case NE_EXPR:
2730 return EQ_EXPR;
2731 case GT_EXPR:
2732 return honor_nans ? UNLE_EXPR : LE_EXPR;
2733 case GE_EXPR:
2734 return honor_nans ? UNLT_EXPR : LT_EXPR;
2735 case LT_EXPR:
2736 return honor_nans ? UNGE_EXPR : GE_EXPR;
2737 case LE_EXPR:
2738 return honor_nans ? UNGT_EXPR : GT_EXPR;
2739 case LTGT_EXPR:
2740 return UNEQ_EXPR;
2741 case UNEQ_EXPR:
2742 return LTGT_EXPR;
2743 case UNGT_EXPR:
2744 return LE_EXPR;
2745 case UNGE_EXPR:
2746 return LT_EXPR;
2747 case UNLT_EXPR:
2748 return GE_EXPR;
2749 case UNLE_EXPR:
2750 return GT_EXPR;
2751 case ORDERED_EXPR:
2752 return UNORDERED_EXPR;
2753 case UNORDERED_EXPR:
2754 return ORDERED_EXPR;
2755 default:
2756 gcc_unreachable ();
2757 }
2758 }
2759
2760 /* Similar, but return the comparison that results if the operands are
2761 swapped. This is safe for floating-point. */
2762
2763 enum tree_code
2764 swap_tree_comparison (enum tree_code code)
2765 {
2766 switch (code)
2767 {
2768 case EQ_EXPR:
2769 case NE_EXPR:
2770 case ORDERED_EXPR:
2771 case UNORDERED_EXPR:
2772 case LTGT_EXPR:
2773 case UNEQ_EXPR:
2774 return code;
2775 case GT_EXPR:
2776 return LT_EXPR;
2777 case GE_EXPR:
2778 return LE_EXPR;
2779 case LT_EXPR:
2780 return GT_EXPR;
2781 case LE_EXPR:
2782 return GE_EXPR;
2783 case UNGT_EXPR:
2784 return UNLT_EXPR;
2785 case UNGE_EXPR:
2786 return UNLE_EXPR;
2787 case UNLT_EXPR:
2788 return UNGT_EXPR;
2789 case UNLE_EXPR:
2790 return UNGE_EXPR;
2791 default:
2792 gcc_unreachable ();
2793 }
2794 }
2795
2796
2797 /* Convert a comparison tree code from an enum tree_code representation
2798 into a compcode bit-based encoding. This function is the inverse of
2799 compcode_to_comparison. */
2800
2801 static enum comparison_code
2802 comparison_to_compcode (enum tree_code code)
2803 {
2804 switch (code)
2805 {
2806 case LT_EXPR:
2807 return COMPCODE_LT;
2808 case EQ_EXPR:
2809 return COMPCODE_EQ;
2810 case LE_EXPR:
2811 return COMPCODE_LE;
2812 case GT_EXPR:
2813 return COMPCODE_GT;
2814 case NE_EXPR:
2815 return COMPCODE_NE;
2816 case GE_EXPR:
2817 return COMPCODE_GE;
2818 case ORDERED_EXPR:
2819 return COMPCODE_ORD;
2820 case UNORDERED_EXPR:
2821 return COMPCODE_UNORD;
2822 case UNLT_EXPR:
2823 return COMPCODE_UNLT;
2824 case UNEQ_EXPR:
2825 return COMPCODE_UNEQ;
2826 case UNLE_EXPR:
2827 return COMPCODE_UNLE;
2828 case UNGT_EXPR:
2829 return COMPCODE_UNGT;
2830 case LTGT_EXPR:
2831 return COMPCODE_LTGT;
2832 case UNGE_EXPR:
2833 return COMPCODE_UNGE;
2834 default:
2835 gcc_unreachable ();
2836 }
2837 }
2838
2839 /* Convert a compcode bit-based encoding of a comparison operator back
2840 to GCC's enum tree_code representation. This function is the
2841 inverse of comparison_to_compcode. */
2842
2843 static enum tree_code
2844 compcode_to_comparison (enum comparison_code code)
2845 {
2846 switch (code)
2847 {
2848 case COMPCODE_LT:
2849 return LT_EXPR;
2850 case COMPCODE_EQ:
2851 return EQ_EXPR;
2852 case COMPCODE_LE:
2853 return LE_EXPR;
2854 case COMPCODE_GT:
2855 return GT_EXPR;
2856 case COMPCODE_NE:
2857 return NE_EXPR;
2858 case COMPCODE_GE:
2859 return GE_EXPR;
2860 case COMPCODE_ORD:
2861 return ORDERED_EXPR;
2862 case COMPCODE_UNORD:
2863 return UNORDERED_EXPR;
2864 case COMPCODE_UNLT:
2865 return UNLT_EXPR;
2866 case COMPCODE_UNEQ:
2867 return UNEQ_EXPR;
2868 case COMPCODE_UNLE:
2869 return UNLE_EXPR;
2870 case COMPCODE_UNGT:
2871 return UNGT_EXPR;
2872 case COMPCODE_LTGT:
2873 return LTGT_EXPR;
2874 case COMPCODE_UNGE:
2875 return UNGE_EXPR;
2876 default:
2877 gcc_unreachable ();
2878 }
2879 }
2880
2881 /* Return a tree for the comparison which is the combination of
2882 doing the AND or OR (depending on CODE) of the two operations LCODE
2883 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2884 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2885 if this makes the transformation invalid. */
2886
2887 tree
2888 combine_comparisons (enum tree_code code, enum tree_code lcode,
2889 enum tree_code rcode, tree truth_type,
2890 tree ll_arg, tree lr_arg)
2891 {
2892 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2893 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2894 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2895 enum comparison_code compcode;
2896
2897 switch (code)
2898 {
2899 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2900 compcode = lcompcode & rcompcode;
2901 break;
2902
2903 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2904 compcode = lcompcode | rcompcode;
2905 break;
2906
2907 default:
2908 return NULL_TREE;
2909 }
2910
2911 if (!honor_nans)
2912 {
2913 /* Eliminate unordered comparisons, as well as LTGT and ORD
2914 which are not used unless the mode has NaNs. */
2915 compcode &= ~COMPCODE_UNORD;
2916 if (compcode == COMPCODE_LTGT)
2917 compcode = COMPCODE_NE;
2918 else if (compcode == COMPCODE_ORD)
2919 compcode = COMPCODE_TRUE;
2920 }
2921 else if (flag_trapping_math)
2922 {
2923 /* Check that the original operation and the optimized ones will trap
2924 under the same condition. */
2925 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2926 && (lcompcode != COMPCODE_EQ)
2927 && (lcompcode != COMPCODE_ORD);
2928 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2929 && (rcompcode != COMPCODE_EQ)
2930 && (rcompcode != COMPCODE_ORD);
2931 bool trap = (compcode & COMPCODE_UNORD) == 0
2932 && (compcode != COMPCODE_EQ)
2933 && (compcode != COMPCODE_ORD);
2934
2935 /* In a short-circuited boolean expression the LHS might be
2936 such that the RHS, if evaluated, will never trap. For
2937 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2938 if neither x nor y is NaN. (This is a mixed blessing: for
2939 example, the expression above will never trap, hence
2940 optimizing it to x < y would be invalid). */
2941 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2942 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2943 rtrap = false;
2944
2945 /* If the comparison was short-circuited, and only the RHS
2946 trapped, we may now generate a spurious trap. */
2947 if (rtrap && !ltrap
2948 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2949 return NULL_TREE;
2950
2951 /* If we changed the conditions that cause a trap, we lose. */
2952 if ((ltrap || rtrap) != trap)
2953 return NULL_TREE;
2954 }
2955
2956 if (compcode == COMPCODE_TRUE)
2957 return constant_boolean_node (true, truth_type);
2958 else if (compcode == COMPCODE_FALSE)
2959 return constant_boolean_node (false, truth_type);
2960 else
2961 return fold_build2 (compcode_to_comparison (compcode),
2962 truth_type, ll_arg, lr_arg);
2963 }
2964
2965 /* Return nonzero if CODE is a tree code that represents a truth value. */
2966
2967 static int
2968 truth_value_p (enum tree_code code)
2969 {
2970 return (TREE_CODE_CLASS (code) == tcc_comparison
2971 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2972 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2973 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2974 }
2975 \f
2976 /* Return nonzero if two operands (typically of the same tree node)
2977 are necessarily equal. If either argument has side-effects this
2978 function returns zero. FLAGS modifies behavior as follows:
2979
2980 If OEP_ONLY_CONST is set, only return nonzero for constants.
2981 This function tests whether the operands are indistinguishable;
2982 it does not test whether they are equal using C's == operation.
2983 The distinction is important for IEEE floating point, because
2984 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2985 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2986
2987 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2988 even though it may hold multiple values during a function.
2989 This is because a GCC tree node guarantees that nothing else is
2990 executed between the evaluation of its "operands" (which may often
2991 be evaluated in arbitrary order). Hence if the operands themselves
2992 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2993 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2994 unset means assuming isochronic (or instantaneous) tree equivalence.
2995 Unless comparing arbitrary expression trees, such as from different
2996 statements, this flag can usually be left unset.
2997
2998 If OEP_PURE_SAME is set, then pure functions with identical arguments
2999 are considered the same. It is used when the caller has other ways
3000 to ensure that global memory is unchanged in between. */
3001
3002 int
3003 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3004 {
3005 /* If either is ERROR_MARK, they aren't equal. */
3006 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3007 return 0;
3008
3009 /* If both types don't have the same signedness, then we can't consider
3010 them equal. We must check this before the STRIP_NOPS calls
3011 because they may change the signedness of the arguments. */
3012 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3013 return 0;
3014
3015 /* If both types don't have the same precision, then it is not safe
3016 to strip NOPs. */
3017 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3018 return 0;
3019
3020 STRIP_NOPS (arg0);
3021 STRIP_NOPS (arg1);
3022
3023 /* In case both args are comparisons but with different comparison
3024 code, try to swap the comparison operands of one arg to produce
3025 a match and compare that variant. */
3026 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3027 && COMPARISON_CLASS_P (arg0)
3028 && COMPARISON_CLASS_P (arg1))
3029 {
3030 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3031
3032 if (TREE_CODE (arg0) == swap_code)
3033 return operand_equal_p (TREE_OPERAND (arg0, 0),
3034 TREE_OPERAND (arg1, 1), flags)
3035 && operand_equal_p (TREE_OPERAND (arg0, 1),
3036 TREE_OPERAND (arg1, 0), flags);
3037 }
3038
3039 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3040 /* This is needed for conversions and for COMPONENT_REF.
3041 Might as well play it safe and always test this. */
3042 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3043 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3044 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3045 return 0;
3046
3047 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3048 We don't care about side effects in that case because the SAVE_EXPR
3049 takes care of that for us. In all other cases, two expressions are
3050 equal if they have no side effects. If we have two identical
3051 expressions with side effects that should be treated the same due
3052 to the only side effects being identical SAVE_EXPR's, that will
3053 be detected in the recursive calls below. */
3054 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3055 && (TREE_CODE (arg0) == SAVE_EXPR
3056 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3057 return 1;
3058
3059 /* Next handle constant cases, those for which we can return 1 even
3060 if ONLY_CONST is set. */
3061 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3062 switch (TREE_CODE (arg0))
3063 {
3064 case INTEGER_CST:
3065 return tree_int_cst_equal (arg0, arg1);
3066
3067 case FIXED_CST:
3068 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3069 TREE_FIXED_CST (arg1));
3070
3071 case REAL_CST:
3072 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3073 TREE_REAL_CST (arg1)))
3074 return 1;
3075
3076
3077 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3078 {
3079 /* If we do not distinguish between signed and unsigned zero,
3080 consider them equal. */
3081 if (real_zerop (arg0) && real_zerop (arg1))
3082 return 1;
3083 }
3084 return 0;
3085
3086 case VECTOR_CST:
3087 {
3088 tree v1, v2;
3089
3090 v1 = TREE_VECTOR_CST_ELTS (arg0);
3091 v2 = TREE_VECTOR_CST_ELTS (arg1);
3092 while (v1 && v2)
3093 {
3094 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3095 flags))
3096 return 0;
3097 v1 = TREE_CHAIN (v1);
3098 v2 = TREE_CHAIN (v2);
3099 }
3100
3101 return v1 == v2;
3102 }
3103
3104 case COMPLEX_CST:
3105 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3106 flags)
3107 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3108 flags));
3109
3110 case STRING_CST:
3111 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3112 && ! memcmp (TREE_STRING_POINTER (arg0),
3113 TREE_STRING_POINTER (arg1),
3114 TREE_STRING_LENGTH (arg0)));
3115
3116 case ADDR_EXPR:
3117 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3118 0);
3119 default:
3120 break;
3121 }
3122
3123 if (flags & OEP_ONLY_CONST)
3124 return 0;
3125
3126 /* Define macros to test an operand from arg0 and arg1 for equality and a
3127 variant that allows null and views null as being different from any
3128 non-null value. In the latter case, if either is null, the both
3129 must be; otherwise, do the normal comparison. */
3130 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3131 TREE_OPERAND (arg1, N), flags)
3132
3133 #define OP_SAME_WITH_NULL(N) \
3134 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3135 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3136
3137 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3138 {
3139 case tcc_unary:
3140 /* Two conversions are equal only if signedness and modes match. */
3141 switch (TREE_CODE (arg0))
3142 {
3143 case NOP_EXPR:
3144 case CONVERT_EXPR:
3145 case FIX_TRUNC_EXPR:
3146 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3147 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3148 return 0;
3149 break;
3150 default:
3151 break;
3152 }
3153
3154 return OP_SAME (0);
3155
3156
3157 case tcc_comparison:
3158 case tcc_binary:
3159 if (OP_SAME (0) && OP_SAME (1))
3160 return 1;
3161
3162 /* For commutative ops, allow the other order. */
3163 return (commutative_tree_code (TREE_CODE (arg0))
3164 && operand_equal_p (TREE_OPERAND (arg0, 0),
3165 TREE_OPERAND (arg1, 1), flags)
3166 && operand_equal_p (TREE_OPERAND (arg0, 1),
3167 TREE_OPERAND (arg1, 0), flags));
3168
3169 case tcc_reference:
3170 /* If either of the pointer (or reference) expressions we are
3171 dereferencing contain a side effect, these cannot be equal. */
3172 if (TREE_SIDE_EFFECTS (arg0)
3173 || TREE_SIDE_EFFECTS (arg1))
3174 return 0;
3175
3176 switch (TREE_CODE (arg0))
3177 {
3178 case INDIRECT_REF:
3179 case ALIGN_INDIRECT_REF:
3180 case MISALIGNED_INDIRECT_REF:
3181 case REALPART_EXPR:
3182 case IMAGPART_EXPR:
3183 return OP_SAME (0);
3184
3185 case ARRAY_REF:
3186 case ARRAY_RANGE_REF:
3187 /* Operands 2 and 3 may be null.
3188 Compare the array index by value if it is constant first as we
3189 may have different types but same value here. */
3190 return (OP_SAME (0)
3191 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3192 TREE_OPERAND (arg1, 1))
3193 || OP_SAME (1))
3194 && OP_SAME_WITH_NULL (2)
3195 && OP_SAME_WITH_NULL (3));
3196
3197 case COMPONENT_REF:
3198 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3199 may be NULL when we're called to compare MEM_EXPRs. */
3200 return OP_SAME_WITH_NULL (0)
3201 && OP_SAME (1)
3202 && OP_SAME_WITH_NULL (2);
3203
3204 case BIT_FIELD_REF:
3205 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3206
3207 default:
3208 return 0;
3209 }
3210
3211 case tcc_expression:
3212 switch (TREE_CODE (arg0))
3213 {
3214 case ADDR_EXPR:
3215 case TRUTH_NOT_EXPR:
3216 return OP_SAME (0);
3217
3218 case TRUTH_ANDIF_EXPR:
3219 case TRUTH_ORIF_EXPR:
3220 return OP_SAME (0) && OP_SAME (1);
3221
3222 case TRUTH_AND_EXPR:
3223 case TRUTH_OR_EXPR:
3224 case TRUTH_XOR_EXPR:
3225 if (OP_SAME (0) && OP_SAME (1))
3226 return 1;
3227
3228 /* Otherwise take into account this is a commutative operation. */
3229 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3230 TREE_OPERAND (arg1, 1), flags)
3231 && operand_equal_p (TREE_OPERAND (arg0, 1),
3232 TREE_OPERAND (arg1, 0), flags));
3233
3234 default:
3235 return 0;
3236 }
3237
3238 case tcc_vl_exp:
3239 switch (TREE_CODE (arg0))
3240 {
3241 case CALL_EXPR:
3242 /* If the CALL_EXPRs call different functions, then they
3243 clearly can not be equal. */
3244 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3245 flags))
3246 return 0;
3247
3248 {
3249 unsigned int cef = call_expr_flags (arg0);
3250 if (flags & OEP_PURE_SAME)
3251 cef &= ECF_CONST | ECF_PURE;
3252 else
3253 cef &= ECF_CONST;
3254 if (!cef)
3255 return 0;
3256 }
3257
3258 /* Now see if all the arguments are the same. */
3259 {
3260 const_call_expr_arg_iterator iter0, iter1;
3261 const_tree a0, a1;
3262 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3263 a1 = first_const_call_expr_arg (arg1, &iter1);
3264 a0 && a1;
3265 a0 = next_const_call_expr_arg (&iter0),
3266 a1 = next_const_call_expr_arg (&iter1))
3267 if (! operand_equal_p (a0, a1, flags))
3268 return 0;
3269
3270 /* If we get here and both argument lists are exhausted
3271 then the CALL_EXPRs are equal. */
3272 return ! (a0 || a1);
3273 }
3274 default:
3275 return 0;
3276 }
3277
3278 case tcc_declaration:
3279 /* Consider __builtin_sqrt equal to sqrt. */
3280 return (TREE_CODE (arg0) == FUNCTION_DECL
3281 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3282 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3283 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3284
3285 default:
3286 return 0;
3287 }
3288
3289 #undef OP_SAME
3290 #undef OP_SAME_WITH_NULL
3291 }
3292 \f
3293 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3294 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3295
3296 When in doubt, return 0. */
3297
3298 static int
3299 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3300 {
3301 int unsignedp1, unsignedpo;
3302 tree primarg0, primarg1, primother;
3303 unsigned int correct_width;
3304
3305 if (operand_equal_p (arg0, arg1, 0))
3306 return 1;
3307
3308 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3309 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3310 return 0;
3311
3312 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3313 and see if the inner values are the same. This removes any
3314 signedness comparison, which doesn't matter here. */
3315 primarg0 = arg0, primarg1 = arg1;
3316 STRIP_NOPS (primarg0);
3317 STRIP_NOPS (primarg1);
3318 if (operand_equal_p (primarg0, primarg1, 0))
3319 return 1;
3320
3321 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3322 actual comparison operand, ARG0.
3323
3324 First throw away any conversions to wider types
3325 already present in the operands. */
3326
3327 primarg1 = get_narrower (arg1, &unsignedp1);
3328 primother = get_narrower (other, &unsignedpo);
3329
3330 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3331 if (unsignedp1 == unsignedpo
3332 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3333 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3334 {
3335 tree type = TREE_TYPE (arg0);
3336
3337 /* Make sure shorter operand is extended the right way
3338 to match the longer operand. */
3339 primarg1 = fold_convert (signed_or_unsigned_type_for
3340 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3341
3342 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3343 return 1;
3344 }
3345
3346 return 0;
3347 }
3348 \f
3349 /* See if ARG is an expression that is either a comparison or is performing
3350 arithmetic on comparisons. The comparisons must only be comparing
3351 two different values, which will be stored in *CVAL1 and *CVAL2; if
3352 they are nonzero it means that some operands have already been found.
3353 No variables may be used anywhere else in the expression except in the
3354 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3355 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3356
3357 If this is true, return 1. Otherwise, return zero. */
3358
3359 static int
3360 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3361 {
3362 enum tree_code code = TREE_CODE (arg);
3363 enum tree_code_class class = TREE_CODE_CLASS (code);
3364
3365 /* We can handle some of the tcc_expression cases here. */
3366 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3367 class = tcc_unary;
3368 else if (class == tcc_expression
3369 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3370 || code == COMPOUND_EXPR))
3371 class = tcc_binary;
3372
3373 else if (class == tcc_expression && code == SAVE_EXPR
3374 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3375 {
3376 /* If we've already found a CVAL1 or CVAL2, this expression is
3377 two complex to handle. */
3378 if (*cval1 || *cval2)
3379 return 0;
3380
3381 class = tcc_unary;
3382 *save_p = 1;
3383 }
3384
3385 switch (class)
3386 {
3387 case tcc_unary:
3388 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3389
3390 case tcc_binary:
3391 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3392 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3393 cval1, cval2, save_p));
3394
3395 case tcc_constant:
3396 return 1;
3397
3398 case tcc_expression:
3399 if (code == COND_EXPR)
3400 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3401 cval1, cval2, save_p)
3402 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3403 cval1, cval2, save_p)
3404 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3405 cval1, cval2, save_p));
3406 return 0;
3407
3408 case tcc_comparison:
3409 /* First see if we can handle the first operand, then the second. For
3410 the second operand, we know *CVAL1 can't be zero. It must be that
3411 one side of the comparison is each of the values; test for the
3412 case where this isn't true by failing if the two operands
3413 are the same. */
3414
3415 if (operand_equal_p (TREE_OPERAND (arg, 0),
3416 TREE_OPERAND (arg, 1), 0))
3417 return 0;
3418
3419 if (*cval1 == 0)
3420 *cval1 = TREE_OPERAND (arg, 0);
3421 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3422 ;
3423 else if (*cval2 == 0)
3424 *cval2 = TREE_OPERAND (arg, 0);
3425 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3426 ;
3427 else
3428 return 0;
3429
3430 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3431 ;
3432 else if (*cval2 == 0)
3433 *cval2 = TREE_OPERAND (arg, 1);
3434 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3435 ;
3436 else
3437 return 0;
3438
3439 return 1;
3440
3441 default:
3442 return 0;
3443 }
3444 }
3445 \f
3446 /* ARG is a tree that is known to contain just arithmetic operations and
3447 comparisons. Evaluate the operations in the tree substituting NEW0 for
3448 any occurrence of OLD0 as an operand of a comparison and likewise for
3449 NEW1 and OLD1. */
3450
3451 static tree
3452 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3453 {
3454 tree type = TREE_TYPE (arg);
3455 enum tree_code code = TREE_CODE (arg);
3456 enum tree_code_class class = TREE_CODE_CLASS (code);
3457
3458 /* We can handle some of the tcc_expression cases here. */
3459 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3460 class = tcc_unary;
3461 else if (class == tcc_expression
3462 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3463 class = tcc_binary;
3464
3465 switch (class)
3466 {
3467 case tcc_unary:
3468 return fold_build1 (code, type,
3469 eval_subst (TREE_OPERAND (arg, 0),
3470 old0, new0, old1, new1));
3471
3472 case tcc_binary:
3473 return fold_build2 (code, type,
3474 eval_subst (TREE_OPERAND (arg, 0),
3475 old0, new0, old1, new1),
3476 eval_subst (TREE_OPERAND (arg, 1),
3477 old0, new0, old1, new1));
3478
3479 case tcc_expression:
3480 switch (code)
3481 {
3482 case SAVE_EXPR:
3483 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3484
3485 case COMPOUND_EXPR:
3486 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3487
3488 case COND_EXPR:
3489 return fold_build3 (code, type,
3490 eval_subst (TREE_OPERAND (arg, 0),
3491 old0, new0, old1, new1),
3492 eval_subst (TREE_OPERAND (arg, 1),
3493 old0, new0, old1, new1),
3494 eval_subst (TREE_OPERAND (arg, 2),
3495 old0, new0, old1, new1));
3496 default:
3497 break;
3498 }
3499 /* Fall through - ??? */
3500
3501 case tcc_comparison:
3502 {
3503 tree arg0 = TREE_OPERAND (arg, 0);
3504 tree arg1 = TREE_OPERAND (arg, 1);
3505
3506 /* We need to check both for exact equality and tree equality. The
3507 former will be true if the operand has a side-effect. In that
3508 case, we know the operand occurred exactly once. */
3509
3510 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3511 arg0 = new0;
3512 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3513 arg0 = new1;
3514
3515 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3516 arg1 = new0;
3517 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3518 arg1 = new1;
3519
3520 return fold_build2 (code, type, arg0, arg1);
3521 }
3522
3523 default:
3524 return arg;
3525 }
3526 }
3527 \f
3528 /* Return a tree for the case when the result of an expression is RESULT
3529 converted to TYPE and OMITTED was previously an operand of the expression
3530 but is now not needed (e.g., we folded OMITTED * 0).
3531
3532 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3533 the conversion of RESULT to TYPE. */
3534
3535 tree
3536 omit_one_operand (tree type, tree result, tree omitted)
3537 {
3538 tree t = fold_convert (type, result);
3539
3540 if (TREE_SIDE_EFFECTS (omitted))
3541 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3542
3543 return non_lvalue (t);
3544 }
3545
3546 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3547
3548 static tree
3549 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3550 {
3551 tree t = fold_convert (type, result);
3552
3553 if (TREE_SIDE_EFFECTS (omitted))
3554 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3555
3556 return pedantic_non_lvalue (t);
3557 }
3558
3559 /* Return a tree for the case when the result of an expression is RESULT
3560 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3561 of the expression but are now not needed.
3562
3563 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3564 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3565 evaluated before OMITTED2. Otherwise, if neither has side effects,
3566 just do the conversion of RESULT to TYPE. */
3567
3568 tree
3569 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3570 {
3571 tree t = fold_convert (type, result);
3572
3573 if (TREE_SIDE_EFFECTS (omitted2))
3574 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3575 if (TREE_SIDE_EFFECTS (omitted1))
3576 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3577
3578 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3579 }
3580
3581 \f
3582 /* Return a simplified tree node for the truth-negation of ARG. This
3583 never alters ARG itself. We assume that ARG is an operation that
3584 returns a truth value (0 or 1).
3585
3586 FIXME: one would think we would fold the result, but it causes
3587 problems with the dominator optimizer. */
3588
3589 tree
3590 fold_truth_not_expr (tree arg)
3591 {
3592 tree type = TREE_TYPE (arg);
3593 enum tree_code code = TREE_CODE (arg);
3594
3595 /* If this is a comparison, we can simply invert it, except for
3596 floating-point non-equality comparisons, in which case we just
3597 enclose a TRUTH_NOT_EXPR around what we have. */
3598
3599 if (TREE_CODE_CLASS (code) == tcc_comparison)
3600 {
3601 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3602 if (FLOAT_TYPE_P (op_type)
3603 && flag_trapping_math
3604 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3605 && code != NE_EXPR && code != EQ_EXPR)
3606 return NULL_TREE;
3607 else
3608 {
3609 code = invert_tree_comparison (code,
3610 HONOR_NANS (TYPE_MODE (op_type)));
3611 if (code == ERROR_MARK)
3612 return NULL_TREE;
3613 else
3614 return build2 (code, type,
3615 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3616 }
3617 }
3618
3619 switch (code)
3620 {
3621 case INTEGER_CST:
3622 return constant_boolean_node (integer_zerop (arg), type);
3623
3624 case TRUTH_AND_EXPR:
3625 return build2 (TRUTH_OR_EXPR, type,
3626 invert_truthvalue (TREE_OPERAND (arg, 0)),
3627 invert_truthvalue (TREE_OPERAND (arg, 1)));
3628
3629 case TRUTH_OR_EXPR:
3630 return build2 (TRUTH_AND_EXPR, type,
3631 invert_truthvalue (TREE_OPERAND (arg, 0)),
3632 invert_truthvalue (TREE_OPERAND (arg, 1)));
3633
3634 case TRUTH_XOR_EXPR:
3635 /* Here we can invert either operand. We invert the first operand
3636 unless the second operand is a TRUTH_NOT_EXPR in which case our
3637 result is the XOR of the first operand with the inside of the
3638 negation of the second operand. */
3639
3640 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3641 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3642 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3643 else
3644 return build2 (TRUTH_XOR_EXPR, type,
3645 invert_truthvalue (TREE_OPERAND (arg, 0)),
3646 TREE_OPERAND (arg, 1));
3647
3648 case TRUTH_ANDIF_EXPR:
3649 return build2 (TRUTH_ORIF_EXPR, type,
3650 invert_truthvalue (TREE_OPERAND (arg, 0)),
3651 invert_truthvalue (TREE_OPERAND (arg, 1)));
3652
3653 case TRUTH_ORIF_EXPR:
3654 return build2 (TRUTH_ANDIF_EXPR, type,
3655 invert_truthvalue (TREE_OPERAND (arg, 0)),
3656 invert_truthvalue (TREE_OPERAND (arg, 1)));
3657
3658 case TRUTH_NOT_EXPR:
3659 return TREE_OPERAND (arg, 0);
3660
3661 case COND_EXPR:
3662 {
3663 tree arg1 = TREE_OPERAND (arg, 1);
3664 tree arg2 = TREE_OPERAND (arg, 2);
3665 /* A COND_EXPR may have a throw as one operand, which
3666 then has void type. Just leave void operands
3667 as they are. */
3668 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3669 VOID_TYPE_P (TREE_TYPE (arg1))
3670 ? arg1 : invert_truthvalue (arg1),
3671 VOID_TYPE_P (TREE_TYPE (arg2))
3672 ? arg2 : invert_truthvalue (arg2));
3673 }
3674
3675 case COMPOUND_EXPR:
3676 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3677 invert_truthvalue (TREE_OPERAND (arg, 1)));
3678
3679 case NON_LVALUE_EXPR:
3680 return invert_truthvalue (TREE_OPERAND (arg, 0));
3681
3682 case NOP_EXPR:
3683 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3684 return build1 (TRUTH_NOT_EXPR, type, arg);
3685
3686 case CONVERT_EXPR:
3687 case FLOAT_EXPR:
3688 return build1 (TREE_CODE (arg), type,
3689 invert_truthvalue (TREE_OPERAND (arg, 0)));
3690
3691 case BIT_AND_EXPR:
3692 if (!integer_onep (TREE_OPERAND (arg, 1)))
3693 break;
3694 return build2 (EQ_EXPR, type, arg,
3695 build_int_cst (type, 0));
3696
3697 case SAVE_EXPR:
3698 return build1 (TRUTH_NOT_EXPR, type, arg);
3699
3700 case CLEANUP_POINT_EXPR:
3701 return build1 (CLEANUP_POINT_EXPR, type,
3702 invert_truthvalue (TREE_OPERAND (arg, 0)));
3703
3704 default:
3705 break;
3706 }
3707
3708 return NULL_TREE;
3709 }
3710
3711 /* Return a simplified tree node for the truth-negation of ARG. This
3712 never alters ARG itself. We assume that ARG is an operation that
3713 returns a truth value (0 or 1).
3714
3715 FIXME: one would think we would fold the result, but it causes
3716 problems with the dominator optimizer. */
3717
3718 tree
3719 invert_truthvalue (tree arg)
3720 {
3721 tree tem;
3722
3723 if (TREE_CODE (arg) == ERROR_MARK)
3724 return arg;
3725
3726 tem = fold_truth_not_expr (arg);
3727 if (!tem)
3728 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3729
3730 return tem;
3731 }
3732
3733 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3734 operands are another bit-wise operation with a common input. If so,
3735 distribute the bit operations to save an operation and possibly two if
3736 constants are involved. For example, convert
3737 (A | B) & (A | C) into A | (B & C)
3738 Further simplification will occur if B and C are constants.
3739
3740 If this optimization cannot be done, 0 will be returned. */
3741
3742 static tree
3743 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3744 {
3745 tree common;
3746 tree left, right;
3747
3748 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3749 || TREE_CODE (arg0) == code
3750 || (TREE_CODE (arg0) != BIT_AND_EXPR
3751 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3752 return 0;
3753
3754 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3755 {
3756 common = TREE_OPERAND (arg0, 0);
3757 left = TREE_OPERAND (arg0, 1);
3758 right = TREE_OPERAND (arg1, 1);
3759 }
3760 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3761 {
3762 common = TREE_OPERAND (arg0, 0);
3763 left = TREE_OPERAND (arg0, 1);
3764 right = TREE_OPERAND (arg1, 0);
3765 }
3766 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3767 {
3768 common = TREE_OPERAND (arg0, 1);
3769 left = TREE_OPERAND (arg0, 0);
3770 right = TREE_OPERAND (arg1, 1);
3771 }
3772 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3773 {
3774 common = TREE_OPERAND (arg0, 1);
3775 left = TREE_OPERAND (arg0, 0);
3776 right = TREE_OPERAND (arg1, 0);
3777 }
3778 else
3779 return 0;
3780
3781 return fold_build2 (TREE_CODE (arg0), type, common,
3782 fold_build2 (code, type, left, right));
3783 }
3784
3785 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3786 with code CODE. This optimization is unsafe. */
3787 static tree
3788 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3789 {
3790 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3791 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3792
3793 /* (A / C) +- (B / C) -> (A +- B) / C. */
3794 if (mul0 == mul1
3795 && operand_equal_p (TREE_OPERAND (arg0, 1),
3796 TREE_OPERAND (arg1, 1), 0))
3797 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3798 fold_build2 (code, type,
3799 TREE_OPERAND (arg0, 0),
3800 TREE_OPERAND (arg1, 0)),
3801 TREE_OPERAND (arg0, 1));
3802
3803 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3804 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3805 TREE_OPERAND (arg1, 0), 0)
3806 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3807 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3808 {
3809 REAL_VALUE_TYPE r0, r1;
3810 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3811 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3812 if (!mul0)
3813 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3814 if (!mul1)
3815 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3816 real_arithmetic (&r0, code, &r0, &r1);
3817 return fold_build2 (MULT_EXPR, type,
3818 TREE_OPERAND (arg0, 0),
3819 build_real (type, r0));
3820 }
3821
3822 return NULL_TREE;
3823 }
3824 \f
3825 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3826 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3827
3828 static tree
3829 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3830 int unsignedp)
3831 {
3832 tree result;
3833
3834 if (bitpos == 0)
3835 {
3836 tree size = TYPE_SIZE (TREE_TYPE (inner));
3837 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3838 || POINTER_TYPE_P (TREE_TYPE (inner)))
3839 && host_integerp (size, 0)
3840 && tree_low_cst (size, 0) == bitsize)
3841 return fold_convert (type, inner);
3842 }
3843
3844 result = build3 (BIT_FIELD_REF, type, inner,
3845 size_int (bitsize), bitsize_int (bitpos));
3846
3847 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3848
3849 return result;
3850 }
3851
3852 /* Optimize a bit-field compare.
3853
3854 There are two cases: First is a compare against a constant and the
3855 second is a comparison of two items where the fields are at the same
3856 bit position relative to the start of a chunk (byte, halfword, word)
3857 large enough to contain it. In these cases we can avoid the shift
3858 implicit in bitfield extractions.
3859
3860 For constants, we emit a compare of the shifted constant with the
3861 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3862 compared. For two fields at the same position, we do the ANDs with the
3863 similar mask and compare the result of the ANDs.
3864
3865 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3866 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3867 are the left and right operands of the comparison, respectively.
3868
3869 If the optimization described above can be done, we return the resulting
3870 tree. Otherwise we return zero. */
3871
3872 static tree
3873 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3874 tree lhs, tree rhs)
3875 {
3876 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3877 tree type = TREE_TYPE (lhs);
3878 tree signed_type, unsigned_type;
3879 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3880 enum machine_mode lmode, rmode, nmode;
3881 int lunsignedp, runsignedp;
3882 int lvolatilep = 0, rvolatilep = 0;
3883 tree linner, rinner = NULL_TREE;
3884 tree mask;
3885 tree offset;
3886
3887 /* Get all the information about the extractions being done. If the bit size
3888 if the same as the size of the underlying object, we aren't doing an
3889 extraction at all and so can do nothing. We also don't want to
3890 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3891 then will no longer be able to replace it. */
3892 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3893 &lunsignedp, &lvolatilep, false);
3894 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3895 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3896 return 0;
3897
3898 if (!const_p)
3899 {
3900 /* If this is not a constant, we can only do something if bit positions,
3901 sizes, and signedness are the same. */
3902 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3903 &runsignedp, &rvolatilep, false);
3904
3905 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3906 || lunsignedp != runsignedp || offset != 0
3907 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3908 return 0;
3909 }
3910
3911 /* See if we can find a mode to refer to this field. We should be able to,
3912 but fail if we can't. */
3913 nmode = get_best_mode (lbitsize, lbitpos,
3914 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3915 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3916 TYPE_ALIGN (TREE_TYPE (rinner))),
3917 word_mode, lvolatilep || rvolatilep);
3918 if (nmode == VOIDmode)
3919 return 0;
3920
3921 /* Set signed and unsigned types of the precision of this mode for the
3922 shifts below. */
3923 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3924 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3925
3926 /* Compute the bit position and size for the new reference and our offset
3927 within it. If the new reference is the same size as the original, we
3928 won't optimize anything, so return zero. */
3929 nbitsize = GET_MODE_BITSIZE (nmode);
3930 nbitpos = lbitpos & ~ (nbitsize - 1);
3931 lbitpos -= nbitpos;
3932 if (nbitsize == lbitsize)
3933 return 0;
3934
3935 if (BYTES_BIG_ENDIAN)
3936 lbitpos = nbitsize - lbitsize - lbitpos;
3937
3938 /* Make the mask to be used against the extracted field. */
3939 mask = build_int_cst_type (unsigned_type, -1);
3940 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3941 mask = const_binop (RSHIFT_EXPR, mask,
3942 size_int (nbitsize - lbitsize - lbitpos), 0);
3943
3944 if (! const_p)
3945 /* If not comparing with constant, just rework the comparison
3946 and return. */
3947 return fold_build2 (code, compare_type,
3948 fold_build2 (BIT_AND_EXPR, unsigned_type,
3949 make_bit_field_ref (linner,
3950 unsigned_type,
3951 nbitsize, nbitpos,
3952 1),
3953 mask),
3954 fold_build2 (BIT_AND_EXPR, unsigned_type,
3955 make_bit_field_ref (rinner,
3956 unsigned_type,
3957 nbitsize, nbitpos,
3958 1),
3959 mask));
3960
3961 /* Otherwise, we are handling the constant case. See if the constant is too
3962 big for the field. Warn and return a tree of for 0 (false) if so. We do
3963 this not only for its own sake, but to avoid having to test for this
3964 error case below. If we didn't, we might generate wrong code.
3965
3966 For unsigned fields, the constant shifted right by the field length should
3967 be all zero. For signed fields, the high-order bits should agree with
3968 the sign bit. */
3969
3970 if (lunsignedp)
3971 {
3972 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3973 fold_convert (unsigned_type, rhs),
3974 size_int (lbitsize), 0)))
3975 {
3976 warning (0, "comparison is always %d due to width of bit-field",
3977 code == NE_EXPR);
3978 return constant_boolean_node (code == NE_EXPR, compare_type);
3979 }
3980 }
3981 else
3982 {
3983 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3984 size_int (lbitsize - 1), 0);
3985 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3986 {
3987 warning (0, "comparison is always %d due to width of bit-field",
3988 code == NE_EXPR);
3989 return constant_boolean_node (code == NE_EXPR, compare_type);
3990 }
3991 }
3992
3993 /* Single-bit compares should always be against zero. */
3994 if (lbitsize == 1 && ! integer_zerop (rhs))
3995 {
3996 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3997 rhs = build_int_cst (type, 0);
3998 }
3999
4000 /* Make a new bitfield reference, shift the constant over the
4001 appropriate number of bits and mask it with the computed mask
4002 (in case this was a signed field). If we changed it, make a new one. */
4003 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4004 if (lvolatilep)
4005 {
4006 TREE_SIDE_EFFECTS (lhs) = 1;
4007 TREE_THIS_VOLATILE (lhs) = 1;
4008 }
4009
4010 rhs = const_binop (BIT_AND_EXPR,
4011 const_binop (LSHIFT_EXPR,
4012 fold_convert (unsigned_type, rhs),
4013 size_int (lbitpos), 0),
4014 mask, 0);
4015
4016 return build2 (code, compare_type,
4017 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4018 rhs);
4019 }
4020 \f
4021 /* Subroutine for fold_truthop: decode a field reference.
4022
4023 If EXP is a comparison reference, we return the innermost reference.
4024
4025 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4026 set to the starting bit number.
4027
4028 If the innermost field can be completely contained in a mode-sized
4029 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4030
4031 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4032 otherwise it is not changed.
4033
4034 *PUNSIGNEDP is set to the signedness of the field.
4035
4036 *PMASK is set to the mask used. This is either contained in a
4037 BIT_AND_EXPR or derived from the width of the field.
4038
4039 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4040
4041 Return 0 if this is not a component reference or is one that we can't
4042 do anything with. */
4043
4044 static tree
4045 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4046 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4047 int *punsignedp, int *pvolatilep,
4048 tree *pmask, tree *pand_mask)
4049 {
4050 tree outer_type = 0;
4051 tree and_mask = 0;
4052 tree mask, inner, offset;
4053 tree unsigned_type;
4054 unsigned int precision;
4055
4056 /* All the optimizations using this function assume integer fields.
4057 There are problems with FP fields since the type_for_size call
4058 below can fail for, e.g., XFmode. */
4059 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4060 return 0;
4061
4062 /* We are interested in the bare arrangement of bits, so strip everything
4063 that doesn't affect the machine mode. However, record the type of the
4064 outermost expression if it may matter below. */
4065 if (TREE_CODE (exp) == NOP_EXPR
4066 || TREE_CODE (exp) == CONVERT_EXPR
4067 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4068 outer_type = TREE_TYPE (exp);
4069 STRIP_NOPS (exp);
4070
4071 if (TREE_CODE (exp) == BIT_AND_EXPR)
4072 {
4073 and_mask = TREE_OPERAND (exp, 1);
4074 exp = TREE_OPERAND (exp, 0);
4075 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4076 if (TREE_CODE (and_mask) != INTEGER_CST)
4077 return 0;
4078 }
4079
4080 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4081 punsignedp, pvolatilep, false);
4082 if ((inner == exp && and_mask == 0)
4083 || *pbitsize < 0 || offset != 0
4084 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4085 return 0;
4086
4087 /* If the number of bits in the reference is the same as the bitsize of
4088 the outer type, then the outer type gives the signedness. Otherwise
4089 (in case of a small bitfield) the signedness is unchanged. */
4090 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4091 *punsignedp = TYPE_UNSIGNED (outer_type);
4092
4093 /* Compute the mask to access the bitfield. */
4094 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4095 precision = TYPE_PRECISION (unsigned_type);
4096
4097 mask = build_int_cst_type (unsigned_type, -1);
4098
4099 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4100 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4101
4102 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4103 if (and_mask != 0)
4104 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4105 fold_convert (unsigned_type, and_mask), mask);
4106
4107 *pmask = mask;
4108 *pand_mask = and_mask;
4109 return inner;
4110 }
4111
4112 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4113 bit positions. */
4114
4115 static int
4116 all_ones_mask_p (tree mask, int size)
4117 {
4118 tree type = TREE_TYPE (mask);
4119 unsigned int precision = TYPE_PRECISION (type);
4120 tree tmask;
4121
4122 tmask = build_int_cst_type (signed_type_for (type), -1);
4123
4124 return
4125 tree_int_cst_equal (mask,
4126 const_binop (RSHIFT_EXPR,
4127 const_binop (LSHIFT_EXPR, tmask,
4128 size_int (precision - size),
4129 0),
4130 size_int (precision - size), 0));
4131 }
4132
4133 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4134 represents the sign bit of EXP's type. If EXP represents a sign
4135 or zero extension, also test VAL against the unextended type.
4136 The return value is the (sub)expression whose sign bit is VAL,
4137 or NULL_TREE otherwise. */
4138
4139 static tree
4140 sign_bit_p (tree exp, tree val)
4141 {
4142 unsigned HOST_WIDE_INT mask_lo, lo;
4143 HOST_WIDE_INT mask_hi, hi;
4144 int width;
4145 tree t;
4146
4147 /* Tree EXP must have an integral type. */
4148 t = TREE_TYPE (exp);
4149 if (! INTEGRAL_TYPE_P (t))
4150 return NULL_TREE;
4151
4152 /* Tree VAL must be an integer constant. */
4153 if (TREE_CODE (val) != INTEGER_CST
4154 || TREE_OVERFLOW (val))
4155 return NULL_TREE;
4156
4157 width = TYPE_PRECISION (t);
4158 if (width > HOST_BITS_PER_WIDE_INT)
4159 {
4160 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4161 lo = 0;
4162
4163 mask_hi = ((unsigned HOST_WIDE_INT) -1
4164 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4165 mask_lo = -1;
4166 }
4167 else
4168 {
4169 hi = 0;
4170 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4171
4172 mask_hi = 0;
4173 mask_lo = ((unsigned HOST_WIDE_INT) -1
4174 >> (HOST_BITS_PER_WIDE_INT - width));
4175 }
4176
4177 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4178 treat VAL as if it were unsigned. */
4179 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4180 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4181 return exp;
4182
4183 /* Handle extension from a narrower type. */
4184 if (TREE_CODE (exp) == NOP_EXPR
4185 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4186 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4187
4188 return NULL_TREE;
4189 }
4190
4191 /* Subroutine for fold_truthop: determine if an operand is simple enough
4192 to be evaluated unconditionally. */
4193
4194 static int
4195 simple_operand_p (tree exp)
4196 {
4197 /* Strip any conversions that don't change the machine mode. */
4198 STRIP_NOPS (exp);
4199
4200 return (CONSTANT_CLASS_P (exp)
4201 || TREE_CODE (exp) == SSA_NAME
4202 || (DECL_P (exp)
4203 && ! TREE_ADDRESSABLE (exp)
4204 && ! TREE_THIS_VOLATILE (exp)
4205 && ! DECL_NONLOCAL (exp)
4206 /* Don't regard global variables as simple. They may be
4207 allocated in ways unknown to the compiler (shared memory,
4208 #pragma weak, etc). */
4209 && ! TREE_PUBLIC (exp)
4210 && ! DECL_EXTERNAL (exp)
4211 /* Loading a static variable is unduly expensive, but global
4212 registers aren't expensive. */
4213 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4214 }
4215 \f
4216 /* The following functions are subroutines to fold_range_test and allow it to
4217 try to change a logical combination of comparisons into a range test.
4218
4219 For example, both
4220 X == 2 || X == 3 || X == 4 || X == 5
4221 and
4222 X >= 2 && X <= 5
4223 are converted to
4224 (unsigned) (X - 2) <= 3
4225
4226 We describe each set of comparisons as being either inside or outside
4227 a range, using a variable named like IN_P, and then describe the
4228 range with a lower and upper bound. If one of the bounds is omitted,
4229 it represents either the highest or lowest value of the type.
4230
4231 In the comments below, we represent a range by two numbers in brackets
4232 preceded by a "+" to designate being inside that range, or a "-" to
4233 designate being outside that range, so the condition can be inverted by
4234 flipping the prefix. An omitted bound is represented by a "-". For
4235 example, "- [-, 10]" means being outside the range starting at the lowest
4236 possible value and ending at 10, in other words, being greater than 10.
4237 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4238 always false.
4239
4240 We set up things so that the missing bounds are handled in a consistent
4241 manner so neither a missing bound nor "true" and "false" need to be
4242 handled using a special case. */
4243
4244 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4245 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4246 and UPPER1_P are nonzero if the respective argument is an upper bound
4247 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4248 must be specified for a comparison. ARG1 will be converted to ARG0's
4249 type if both are specified. */
4250
4251 static tree
4252 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4253 tree arg1, int upper1_p)
4254 {
4255 tree tem;
4256 int result;
4257 int sgn0, sgn1;
4258
4259 /* If neither arg represents infinity, do the normal operation.
4260 Else, if not a comparison, return infinity. Else handle the special
4261 comparison rules. Note that most of the cases below won't occur, but
4262 are handled for consistency. */
4263
4264 if (arg0 != 0 && arg1 != 0)
4265 {
4266 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4267 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4268 STRIP_NOPS (tem);
4269 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4270 }
4271
4272 if (TREE_CODE_CLASS (code) != tcc_comparison)
4273 return 0;
4274
4275 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4276 for neither. In real maths, we cannot assume open ended ranges are
4277 the same. But, this is computer arithmetic, where numbers are finite.
4278 We can therefore make the transformation of any unbounded range with
4279 the value Z, Z being greater than any representable number. This permits
4280 us to treat unbounded ranges as equal. */
4281 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4282 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4283 switch (code)
4284 {
4285 case EQ_EXPR:
4286 result = sgn0 == sgn1;
4287 break;
4288 case NE_EXPR:
4289 result = sgn0 != sgn1;
4290 break;
4291 case LT_EXPR:
4292 result = sgn0 < sgn1;
4293 break;
4294 case LE_EXPR:
4295 result = sgn0 <= sgn1;
4296 break;
4297 case GT_EXPR:
4298 result = sgn0 > sgn1;
4299 break;
4300 case GE_EXPR:
4301 result = sgn0 >= sgn1;
4302 break;
4303 default:
4304 gcc_unreachable ();
4305 }
4306
4307 return constant_boolean_node (result, type);
4308 }
4309 \f
4310 /* Given EXP, a logical expression, set the range it is testing into
4311 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4312 actually being tested. *PLOW and *PHIGH will be made of the same
4313 type as the returned expression. If EXP is not a comparison, we
4314 will most likely not be returning a useful value and range. Set
4315 *STRICT_OVERFLOW_P to true if the return value is only valid
4316 because signed overflow is undefined; otherwise, do not change
4317 *STRICT_OVERFLOW_P. */
4318
4319 static tree
4320 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4321 bool *strict_overflow_p)
4322 {
4323 enum tree_code code;
4324 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4325 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4326 int in_p, n_in_p;
4327 tree low, high, n_low, n_high;
4328
4329 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4330 and see if we can refine the range. Some of the cases below may not
4331 happen, but it doesn't seem worth worrying about this. We "continue"
4332 the outer loop when we've changed something; otherwise we "break"
4333 the switch, which will "break" the while. */
4334
4335 in_p = 0;
4336 low = high = build_int_cst (TREE_TYPE (exp), 0);
4337
4338 while (1)
4339 {
4340 code = TREE_CODE (exp);
4341 exp_type = TREE_TYPE (exp);
4342
4343 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4344 {
4345 if (TREE_OPERAND_LENGTH (exp) > 0)
4346 arg0 = TREE_OPERAND (exp, 0);
4347 if (TREE_CODE_CLASS (code) == tcc_comparison
4348 || TREE_CODE_CLASS (code) == tcc_unary
4349 || TREE_CODE_CLASS (code) == tcc_binary)
4350 arg0_type = TREE_TYPE (arg0);
4351 if (TREE_CODE_CLASS (code) == tcc_binary
4352 || TREE_CODE_CLASS (code) == tcc_comparison
4353 || (TREE_CODE_CLASS (code) == tcc_expression
4354 && TREE_OPERAND_LENGTH (exp) > 1))
4355 arg1 = TREE_OPERAND (exp, 1);
4356 }
4357
4358 switch (code)
4359 {
4360 case TRUTH_NOT_EXPR:
4361 in_p = ! in_p, exp = arg0;
4362 continue;
4363
4364 case EQ_EXPR: case NE_EXPR:
4365 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4366 /* We can only do something if the range is testing for zero
4367 and if the second operand is an integer constant. Note that
4368 saying something is "in" the range we make is done by
4369 complementing IN_P since it will set in the initial case of
4370 being not equal to zero; "out" is leaving it alone. */
4371 if (low == 0 || high == 0
4372 || ! integer_zerop (low) || ! integer_zerop (high)
4373 || TREE_CODE (arg1) != INTEGER_CST)
4374 break;
4375
4376 switch (code)
4377 {
4378 case NE_EXPR: /* - [c, c] */
4379 low = high = arg1;
4380 break;
4381 case EQ_EXPR: /* + [c, c] */
4382 in_p = ! in_p, low = high = arg1;
4383 break;
4384 case GT_EXPR: /* - [-, c] */
4385 low = 0, high = arg1;
4386 break;
4387 case GE_EXPR: /* + [c, -] */
4388 in_p = ! in_p, low = arg1, high = 0;
4389 break;
4390 case LT_EXPR: /* - [c, -] */
4391 low = arg1, high = 0;
4392 break;
4393 case LE_EXPR: /* + [-, c] */
4394 in_p = ! in_p, low = 0, high = arg1;
4395 break;
4396 default:
4397 gcc_unreachable ();
4398 }
4399
4400 /* If this is an unsigned comparison, we also know that EXP is
4401 greater than or equal to zero. We base the range tests we make
4402 on that fact, so we record it here so we can parse existing
4403 range tests. We test arg0_type since often the return type
4404 of, e.g. EQ_EXPR, is boolean. */
4405 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4406 {
4407 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4408 in_p, low, high, 1,
4409 build_int_cst (arg0_type, 0),
4410 NULL_TREE))
4411 break;
4412
4413 in_p = n_in_p, low = n_low, high = n_high;
4414
4415 /* If the high bound is missing, but we have a nonzero low
4416 bound, reverse the range so it goes from zero to the low bound
4417 minus 1. */
4418 if (high == 0 && low && ! integer_zerop (low))
4419 {
4420 in_p = ! in_p;
4421 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4422 integer_one_node, 0);
4423 low = build_int_cst (arg0_type, 0);
4424 }
4425 }
4426
4427 exp = arg0;
4428 continue;
4429
4430 case NEGATE_EXPR:
4431 /* (-x) IN [a,b] -> x in [-b, -a] */
4432 n_low = range_binop (MINUS_EXPR, exp_type,
4433 build_int_cst (exp_type, 0),
4434 0, high, 1);
4435 n_high = range_binop (MINUS_EXPR, exp_type,
4436 build_int_cst (exp_type, 0),
4437 0, low, 0);
4438 low = n_low, high = n_high;
4439 exp = arg0;
4440 continue;
4441
4442 case BIT_NOT_EXPR:
4443 /* ~ X -> -X - 1 */
4444 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4445 build_int_cst (exp_type, 1));
4446 continue;
4447
4448 case PLUS_EXPR: case MINUS_EXPR:
4449 if (TREE_CODE (arg1) != INTEGER_CST)
4450 break;
4451
4452 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4453 move a constant to the other side. */
4454 if (!TYPE_UNSIGNED (arg0_type)
4455 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4456 break;
4457
4458 /* If EXP is signed, any overflow in the computation is undefined,
4459 so we don't worry about it so long as our computations on
4460 the bounds don't overflow. For unsigned, overflow is defined
4461 and this is exactly the right thing. */
4462 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4463 arg0_type, low, 0, arg1, 0);
4464 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4465 arg0_type, high, 1, arg1, 0);
4466 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4467 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4468 break;
4469
4470 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4471 *strict_overflow_p = true;
4472
4473 /* Check for an unsigned range which has wrapped around the maximum
4474 value thus making n_high < n_low, and normalize it. */
4475 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4476 {
4477 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4478 integer_one_node, 0);
4479 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4480 integer_one_node, 0);
4481
4482 /* If the range is of the form +/- [ x+1, x ], we won't
4483 be able to normalize it. But then, it represents the
4484 whole range or the empty set, so make it
4485 +/- [ -, - ]. */
4486 if (tree_int_cst_equal (n_low, low)
4487 && tree_int_cst_equal (n_high, high))
4488 low = high = 0;
4489 else
4490 in_p = ! in_p;
4491 }
4492 else
4493 low = n_low, high = n_high;
4494
4495 exp = arg0;
4496 continue;
4497
4498 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4499 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4500 break;
4501
4502 if (! INTEGRAL_TYPE_P (arg0_type)
4503 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4504 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4505 break;
4506
4507 n_low = low, n_high = high;
4508
4509 if (n_low != 0)
4510 n_low = fold_convert (arg0_type, n_low);
4511
4512 if (n_high != 0)
4513 n_high = fold_convert (arg0_type, n_high);
4514
4515
4516 /* If we're converting arg0 from an unsigned type, to exp,
4517 a signed type, we will be doing the comparison as unsigned.
4518 The tests above have already verified that LOW and HIGH
4519 are both positive.
4520
4521 So we have to ensure that we will handle large unsigned
4522 values the same way that the current signed bounds treat
4523 negative values. */
4524
4525 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4526 {
4527 tree high_positive;
4528 tree equiv_type;
4529 /* For fixed-point modes, we need to pass the saturating flag
4530 as the 2nd parameter. */
4531 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4532 equiv_type = lang_hooks.types.type_for_mode
4533 (TYPE_MODE (arg0_type),
4534 TYPE_SATURATING (arg0_type));
4535 else
4536 equiv_type = lang_hooks.types.type_for_mode
4537 (TYPE_MODE (arg0_type), 1);
4538
4539 /* A range without an upper bound is, naturally, unbounded.
4540 Since convert would have cropped a very large value, use
4541 the max value for the destination type. */
4542 high_positive
4543 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4544 : TYPE_MAX_VALUE (arg0_type);
4545
4546 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4547 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4548 fold_convert (arg0_type,
4549 high_positive),
4550 build_int_cst (arg0_type, 1));
4551
4552 /* If the low bound is specified, "and" the range with the
4553 range for which the original unsigned value will be
4554 positive. */
4555 if (low != 0)
4556 {
4557 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4558 1, n_low, n_high, 1,
4559 fold_convert (arg0_type,
4560 integer_zero_node),
4561 high_positive))
4562 break;
4563
4564 in_p = (n_in_p == in_p);
4565 }
4566 else
4567 {
4568 /* Otherwise, "or" the range with the range of the input
4569 that will be interpreted as negative. */
4570 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4571 0, n_low, n_high, 1,
4572 fold_convert (arg0_type,
4573 integer_zero_node),
4574 high_positive))
4575 break;
4576
4577 in_p = (in_p != n_in_p);
4578 }
4579 }
4580
4581 exp = arg0;
4582 low = n_low, high = n_high;
4583 continue;
4584
4585 default:
4586 break;
4587 }
4588
4589 break;
4590 }
4591
4592 /* If EXP is a constant, we can evaluate whether this is true or false. */
4593 if (TREE_CODE (exp) == INTEGER_CST)
4594 {
4595 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4596 exp, 0, low, 0))
4597 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4598 exp, 1, high, 1)));
4599 low = high = 0;
4600 exp = 0;
4601 }
4602
4603 *pin_p = in_p, *plow = low, *phigh = high;
4604 return exp;
4605 }
4606 \f
4607 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4608 type, TYPE, return an expression to test if EXP is in (or out of, depending
4609 on IN_P) the range. Return 0 if the test couldn't be created. */
4610
4611 static tree
4612 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4613 {
4614 tree etype = TREE_TYPE (exp);
4615 tree value;
4616
4617 #ifdef HAVE_canonicalize_funcptr_for_compare
4618 /* Disable this optimization for function pointer expressions
4619 on targets that require function pointer canonicalization. */
4620 if (HAVE_canonicalize_funcptr_for_compare
4621 && TREE_CODE (etype) == POINTER_TYPE
4622 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4623 return NULL_TREE;
4624 #endif
4625
4626 if (! in_p)
4627 {
4628 value = build_range_check (type, exp, 1, low, high);
4629 if (value != 0)
4630 return invert_truthvalue (value);
4631
4632 return 0;
4633 }
4634
4635 if (low == 0 && high == 0)
4636 return build_int_cst (type, 1);
4637
4638 if (low == 0)
4639 return fold_build2 (LE_EXPR, type, exp,
4640 fold_convert (etype, high));
4641
4642 if (high == 0)
4643 return fold_build2 (GE_EXPR, type, exp,
4644 fold_convert (etype, low));
4645
4646 if (operand_equal_p (low, high, 0))
4647 return fold_build2 (EQ_EXPR, type, exp,
4648 fold_convert (etype, low));
4649
4650 if (integer_zerop (low))
4651 {
4652 if (! TYPE_UNSIGNED (etype))
4653 {
4654 etype = unsigned_type_for (etype);
4655 high = fold_convert (etype, high);
4656 exp = fold_convert (etype, exp);
4657 }
4658 return build_range_check (type, exp, 1, 0, high);
4659 }
4660
4661 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4662 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4663 {
4664 unsigned HOST_WIDE_INT lo;
4665 HOST_WIDE_INT hi;
4666 int prec;
4667
4668 prec = TYPE_PRECISION (etype);
4669 if (prec <= HOST_BITS_PER_WIDE_INT)
4670 {
4671 hi = 0;
4672 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4673 }
4674 else
4675 {
4676 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4677 lo = (unsigned HOST_WIDE_INT) -1;
4678 }
4679
4680 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4681 {
4682 if (TYPE_UNSIGNED (etype))
4683 {
4684 etype = signed_type_for (etype);
4685 exp = fold_convert (etype, exp);
4686 }
4687 return fold_build2 (GT_EXPR, type, exp,
4688 build_int_cst (etype, 0));
4689 }
4690 }
4691
4692 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4693 This requires wrap-around arithmetics for the type of the expression. */
4694 switch (TREE_CODE (etype))
4695 {
4696 case INTEGER_TYPE:
4697 /* There is no requirement that LOW be within the range of ETYPE
4698 if the latter is a subtype. It must, however, be within the base
4699 type of ETYPE. So be sure we do the subtraction in that type. */
4700 if (TREE_TYPE (etype))
4701 etype = TREE_TYPE (etype);
4702 break;
4703
4704 case ENUMERAL_TYPE:
4705 case BOOLEAN_TYPE:
4706 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4707 TYPE_UNSIGNED (etype));
4708 break;
4709
4710 default:
4711 break;
4712 }
4713
4714 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4715 if (TREE_CODE (etype) == INTEGER_TYPE
4716 && !TYPE_OVERFLOW_WRAPS (etype))
4717 {
4718 tree utype, minv, maxv;
4719
4720 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4721 for the type in question, as we rely on this here. */
4722 utype = unsigned_type_for (etype);
4723 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4724 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4725 integer_one_node, 1);
4726 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4727
4728 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4729 minv, 1, maxv, 1)))
4730 etype = utype;
4731 else
4732 return 0;
4733 }
4734
4735 high = fold_convert (etype, high);
4736 low = fold_convert (etype, low);
4737 exp = fold_convert (etype, exp);
4738
4739 value = const_binop (MINUS_EXPR, high, low, 0);
4740
4741
4742 if (POINTER_TYPE_P (etype))
4743 {
4744 if (value != 0 && !TREE_OVERFLOW (value))
4745 {
4746 low = fold_convert (sizetype, low);
4747 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4748 return build_range_check (type,
4749 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4750 1, build_int_cst (etype, 0), value);
4751 }
4752 return 0;
4753 }
4754
4755 if (value != 0 && !TREE_OVERFLOW (value))
4756 return build_range_check (type,
4757 fold_build2 (MINUS_EXPR, etype, exp, low),
4758 1, build_int_cst (etype, 0), value);
4759
4760 return 0;
4761 }
4762 \f
4763 /* Return the predecessor of VAL in its type, handling the infinite case. */
4764
4765 static tree
4766 range_predecessor (tree val)
4767 {
4768 tree type = TREE_TYPE (val);
4769
4770 if (INTEGRAL_TYPE_P (type)
4771 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4772 return 0;
4773 else
4774 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4775 }
4776
4777 /* Return the successor of VAL in its type, handling the infinite case. */
4778
4779 static tree
4780 range_successor (tree val)
4781 {
4782 tree type = TREE_TYPE (val);
4783
4784 if (INTEGRAL_TYPE_P (type)
4785 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4786 return 0;
4787 else
4788 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4789 }
4790
4791 /* Given two ranges, see if we can merge them into one. Return 1 if we
4792 can, 0 if we can't. Set the output range into the specified parameters. */
4793
4794 static int
4795 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4796 tree high0, int in1_p, tree low1, tree high1)
4797 {
4798 int no_overlap;
4799 int subset;
4800 int temp;
4801 tree tem;
4802 int in_p;
4803 tree low, high;
4804 int lowequal = ((low0 == 0 && low1 == 0)
4805 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4806 low0, 0, low1, 0)));
4807 int highequal = ((high0 == 0 && high1 == 0)
4808 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4809 high0, 1, high1, 1)));
4810
4811 /* Make range 0 be the range that starts first, or ends last if they
4812 start at the same value. Swap them if it isn't. */
4813 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4814 low0, 0, low1, 0))
4815 || (lowequal
4816 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4817 high1, 1, high0, 1))))
4818 {
4819 temp = in0_p, in0_p = in1_p, in1_p = temp;
4820 tem = low0, low0 = low1, low1 = tem;
4821 tem = high0, high0 = high1, high1 = tem;
4822 }
4823
4824 /* Now flag two cases, whether the ranges are disjoint or whether the
4825 second range is totally subsumed in the first. Note that the tests
4826 below are simplified by the ones above. */
4827 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4828 high0, 1, low1, 0));
4829 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4830 high1, 1, high0, 1));
4831
4832 /* We now have four cases, depending on whether we are including or
4833 excluding the two ranges. */
4834 if (in0_p && in1_p)
4835 {
4836 /* If they don't overlap, the result is false. If the second range
4837 is a subset it is the result. Otherwise, the range is from the start
4838 of the second to the end of the first. */
4839 if (no_overlap)
4840 in_p = 0, low = high = 0;
4841 else if (subset)
4842 in_p = 1, low = low1, high = high1;
4843 else
4844 in_p = 1, low = low1, high = high0;
4845 }
4846
4847 else if (in0_p && ! in1_p)
4848 {
4849 /* If they don't overlap, the result is the first range. If they are
4850 equal, the result is false. If the second range is a subset of the
4851 first, and the ranges begin at the same place, we go from just after
4852 the end of the second range to the end of the first. If the second
4853 range is not a subset of the first, or if it is a subset and both
4854 ranges end at the same place, the range starts at the start of the
4855 first range and ends just before the second range.
4856 Otherwise, we can't describe this as a single range. */
4857 if (no_overlap)
4858 in_p = 1, low = low0, high = high0;
4859 else if (lowequal && highequal)
4860 in_p = 0, low = high = 0;
4861 else if (subset && lowequal)
4862 {
4863 low = range_successor (high1);
4864 high = high0;
4865 in_p = 1;
4866 if (low == 0)
4867 {
4868 /* We are in the weird situation where high0 > high1 but
4869 high1 has no successor. Punt. */
4870 return 0;
4871 }
4872 }
4873 else if (! subset || highequal)
4874 {
4875 low = low0;
4876 high = range_predecessor (low1);
4877 in_p = 1;
4878 if (high == 0)
4879 {
4880 /* low0 < low1 but low1 has no predecessor. Punt. */
4881 return 0;
4882 }
4883 }
4884 else
4885 return 0;
4886 }
4887
4888 else if (! in0_p && in1_p)
4889 {
4890 /* If they don't overlap, the result is the second range. If the second
4891 is a subset of the first, the result is false. Otherwise,
4892 the range starts just after the first range and ends at the
4893 end of the second. */
4894 if (no_overlap)
4895 in_p = 1, low = low1, high = high1;
4896 else if (subset || highequal)
4897 in_p = 0, low = high = 0;
4898 else
4899 {
4900 low = range_successor (high0);
4901 high = high1;
4902 in_p = 1;
4903 if (low == 0)
4904 {
4905 /* high1 > high0 but high0 has no successor. Punt. */
4906 return 0;
4907 }
4908 }
4909 }
4910
4911 else
4912 {
4913 /* The case where we are excluding both ranges. Here the complex case
4914 is if they don't overlap. In that case, the only time we have a
4915 range is if they are adjacent. If the second is a subset of the
4916 first, the result is the first. Otherwise, the range to exclude
4917 starts at the beginning of the first range and ends at the end of the
4918 second. */
4919 if (no_overlap)
4920 {
4921 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4922 range_successor (high0),
4923 1, low1, 0)))
4924 in_p = 0, low = low0, high = high1;
4925 else
4926 {
4927 /* Canonicalize - [min, x] into - [-, x]. */
4928 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4929 switch (TREE_CODE (TREE_TYPE (low0)))
4930 {
4931 case ENUMERAL_TYPE:
4932 if (TYPE_PRECISION (TREE_TYPE (low0))
4933 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4934 break;
4935 /* FALLTHROUGH */
4936 case INTEGER_TYPE:
4937 if (tree_int_cst_equal (low0,
4938 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4939 low0 = 0;
4940 break;
4941 case POINTER_TYPE:
4942 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4943 && integer_zerop (low0))
4944 low0 = 0;
4945 break;
4946 default:
4947 break;
4948 }
4949
4950 /* Canonicalize - [x, max] into - [x, -]. */
4951 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4952 switch (TREE_CODE (TREE_TYPE (high1)))
4953 {
4954 case ENUMERAL_TYPE:
4955 if (TYPE_PRECISION (TREE_TYPE (high1))
4956 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4957 break;
4958 /* FALLTHROUGH */
4959 case INTEGER_TYPE:
4960 if (tree_int_cst_equal (high1,
4961 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4962 high1 = 0;
4963 break;
4964 case POINTER_TYPE:
4965 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4966 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4967 high1, 1,
4968 integer_one_node, 1)))
4969 high1 = 0;
4970 break;
4971 default:
4972 break;
4973 }
4974
4975 /* The ranges might be also adjacent between the maximum and
4976 minimum values of the given type. For
4977 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4978 return + [x + 1, y - 1]. */
4979 if (low0 == 0 && high1 == 0)
4980 {
4981 low = range_successor (high0);
4982 high = range_predecessor (low1);
4983 if (low == 0 || high == 0)
4984 return 0;
4985
4986 in_p = 1;
4987 }
4988 else
4989 return 0;
4990 }
4991 }
4992 else if (subset)
4993 in_p = 0, low = low0, high = high0;
4994 else
4995 in_p = 0, low = low0, high = high1;
4996 }
4997
4998 *pin_p = in_p, *plow = low, *phigh = high;
4999 return 1;
5000 }
5001 \f
5002
5003 /* Subroutine of fold, looking inside expressions of the form
5004 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5005 of the COND_EXPR. This function is being used also to optimize
5006 A op B ? C : A, by reversing the comparison first.
5007
5008 Return a folded expression whose code is not a COND_EXPR
5009 anymore, or NULL_TREE if no folding opportunity is found. */
5010
5011 static tree
5012 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
5013 {
5014 enum tree_code comp_code = TREE_CODE (arg0);
5015 tree arg00 = TREE_OPERAND (arg0, 0);
5016 tree arg01 = TREE_OPERAND (arg0, 1);
5017 tree arg1_type = TREE_TYPE (arg1);
5018 tree tem;
5019
5020 STRIP_NOPS (arg1);
5021 STRIP_NOPS (arg2);
5022
5023 /* If we have A op 0 ? A : -A, consider applying the following
5024 transformations:
5025
5026 A == 0? A : -A same as -A
5027 A != 0? A : -A same as A
5028 A >= 0? A : -A same as abs (A)
5029 A > 0? A : -A same as abs (A)
5030 A <= 0? A : -A same as -abs (A)
5031 A < 0? A : -A same as -abs (A)
5032
5033 None of these transformations work for modes with signed
5034 zeros. If A is +/-0, the first two transformations will
5035 change the sign of the result (from +0 to -0, or vice
5036 versa). The last four will fix the sign of the result,
5037 even though the original expressions could be positive or
5038 negative, depending on the sign of A.
5039
5040 Note that all these transformations are correct if A is
5041 NaN, since the two alternatives (A and -A) are also NaNs. */
5042 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
5043 ? real_zerop (arg01)
5044 : integer_zerop (arg01))
5045 && ((TREE_CODE (arg2) == NEGATE_EXPR
5046 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5047 /* In the case that A is of the form X-Y, '-A' (arg2) may
5048 have already been folded to Y-X, check for that. */
5049 || (TREE_CODE (arg1) == MINUS_EXPR
5050 && TREE_CODE (arg2) == MINUS_EXPR
5051 && operand_equal_p (TREE_OPERAND (arg1, 0),
5052 TREE_OPERAND (arg2, 1), 0)
5053 && operand_equal_p (TREE_OPERAND (arg1, 1),
5054 TREE_OPERAND (arg2, 0), 0))))
5055 switch (comp_code)
5056 {
5057 case EQ_EXPR:
5058 case UNEQ_EXPR:
5059 tem = fold_convert (arg1_type, arg1);
5060 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5061 case NE_EXPR:
5062 case LTGT_EXPR:
5063 return pedantic_non_lvalue (fold_convert (type, arg1));
5064 case UNGE_EXPR:
5065 case UNGT_EXPR:
5066 if (flag_trapping_math)
5067 break;
5068 /* Fall through. */
5069 case GE_EXPR:
5070 case GT_EXPR:
5071 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5072 arg1 = fold_convert (signed_type_for
5073 (TREE_TYPE (arg1)), arg1);
5074 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5075 return pedantic_non_lvalue (fold_convert (type, tem));
5076 case UNLE_EXPR:
5077 case UNLT_EXPR:
5078 if (flag_trapping_math)
5079 break;
5080 case LE_EXPR:
5081 case LT_EXPR:
5082 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5083 arg1 = fold_convert (signed_type_for
5084 (TREE_TYPE (arg1)), arg1);
5085 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5086 return negate_expr (fold_convert (type, tem));
5087 default:
5088 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5089 break;
5090 }
5091
5092 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5093 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5094 both transformations are correct when A is NaN: A != 0
5095 is then true, and A == 0 is false. */
5096
5097 if (integer_zerop (arg01) && integer_zerop (arg2))
5098 {
5099 if (comp_code == NE_EXPR)
5100 return pedantic_non_lvalue (fold_convert (type, arg1));
5101 else if (comp_code == EQ_EXPR)
5102 return build_int_cst (type, 0);
5103 }
5104
5105 /* Try some transformations of A op B ? A : B.
5106
5107 A == B? A : B same as B
5108 A != B? A : B same as A
5109 A >= B? A : B same as max (A, B)
5110 A > B? A : B same as max (B, A)
5111 A <= B? A : B same as min (A, B)
5112 A < B? A : B same as min (B, A)
5113
5114 As above, these transformations don't work in the presence
5115 of signed zeros. For example, if A and B are zeros of
5116 opposite sign, the first two transformations will change
5117 the sign of the result. In the last four, the original
5118 expressions give different results for (A=+0, B=-0) and
5119 (A=-0, B=+0), but the transformed expressions do not.
5120
5121 The first two transformations are correct if either A or B
5122 is a NaN. In the first transformation, the condition will
5123 be false, and B will indeed be chosen. In the case of the
5124 second transformation, the condition A != B will be true,
5125 and A will be chosen.
5126
5127 The conversions to max() and min() are not correct if B is
5128 a number and A is not. The conditions in the original
5129 expressions will be false, so all four give B. The min()
5130 and max() versions would give a NaN instead. */
5131 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
5132 /* Avoid these transformations if the COND_EXPR may be used
5133 as an lvalue in the C++ front-end. PR c++/19199. */
5134 && (in_gimple_form
5135 || (strcmp (lang_hooks.name, "GNU C++") != 0
5136 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5137 || ! maybe_lvalue_p (arg1)
5138 || ! maybe_lvalue_p (arg2)))
5139 {
5140 tree comp_op0 = arg00;
5141 tree comp_op1 = arg01;
5142 tree comp_type = TREE_TYPE (comp_op0);
5143
5144 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5145 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5146 {
5147 comp_type = type;
5148 comp_op0 = arg1;
5149 comp_op1 = arg2;
5150 }
5151
5152 switch (comp_code)
5153 {
5154 case EQ_EXPR:
5155 return pedantic_non_lvalue (fold_convert (type, arg2));
5156 case NE_EXPR:
5157 return pedantic_non_lvalue (fold_convert (type, arg1));
5158 case LE_EXPR:
5159 case LT_EXPR:
5160 case UNLE_EXPR:
5161 case UNLT_EXPR:
5162 /* In C++ a ?: expression can be an lvalue, so put the
5163 operand which will be used if they are equal first
5164 so that we can convert this back to the
5165 corresponding COND_EXPR. */
5166 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5167 {
5168 comp_op0 = fold_convert (comp_type, comp_op0);
5169 comp_op1 = fold_convert (comp_type, comp_op1);
5170 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5171 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5172 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5173 return pedantic_non_lvalue (fold_convert (type, tem));
5174 }
5175 break;
5176 case GE_EXPR:
5177 case GT_EXPR:
5178 case UNGE_EXPR:
5179 case UNGT_EXPR:
5180 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5181 {
5182 comp_op0 = fold_convert (comp_type, comp_op0);
5183 comp_op1 = fold_convert (comp_type, comp_op1);
5184 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5185 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5186 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5187 return pedantic_non_lvalue (fold_convert (type, tem));
5188 }
5189 break;
5190 case UNEQ_EXPR:
5191 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5192 return pedantic_non_lvalue (fold_convert (type, arg2));
5193 break;
5194 case LTGT_EXPR:
5195 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5196 return pedantic_non_lvalue (fold_convert (type, arg1));
5197 break;
5198 default:
5199 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5200 break;
5201 }
5202 }
5203
5204 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5205 we might still be able to simplify this. For example,
5206 if C1 is one less or one more than C2, this might have started
5207 out as a MIN or MAX and been transformed by this function.
5208 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5209
5210 if (INTEGRAL_TYPE_P (type)
5211 && TREE_CODE (arg01) == INTEGER_CST
5212 && TREE_CODE (arg2) == INTEGER_CST)
5213 switch (comp_code)
5214 {
5215 case EQ_EXPR:
5216 /* We can replace A with C1 in this case. */
5217 arg1 = fold_convert (type, arg01);
5218 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5219
5220 case LT_EXPR:
5221 /* If C1 is C2 + 1, this is min(A, C2). */
5222 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5223 OEP_ONLY_CONST)
5224 && operand_equal_p (arg01,
5225 const_binop (PLUS_EXPR, arg2,
5226 build_int_cst (type, 1), 0),
5227 OEP_ONLY_CONST))
5228 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5229 type,
5230 fold_convert (type, arg1),
5231 arg2));
5232 break;
5233
5234 case LE_EXPR:
5235 /* If C1 is C2 - 1, this is min(A, C2). */
5236 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5237 OEP_ONLY_CONST)
5238 && operand_equal_p (arg01,
5239 const_binop (MINUS_EXPR, arg2,
5240 build_int_cst (type, 1), 0),
5241 OEP_ONLY_CONST))
5242 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5243 type,
5244 fold_convert (type, arg1),
5245 arg2));
5246 break;
5247
5248 case GT_EXPR:
5249 /* If C1 is C2 - 1, this is max(A, C2). */
5250 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5251 OEP_ONLY_CONST)
5252 && operand_equal_p (arg01,
5253 const_binop (MINUS_EXPR, arg2,
5254 build_int_cst (type, 1), 0),
5255 OEP_ONLY_CONST))
5256 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5257 type,
5258 fold_convert (type, arg1),
5259 arg2));
5260 break;
5261
5262 case GE_EXPR:
5263 /* If C1 is C2 + 1, this is max(A, C2). */
5264 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5265 OEP_ONLY_CONST)
5266 && operand_equal_p (arg01,
5267 const_binop (PLUS_EXPR, arg2,
5268 build_int_cst (type, 1), 0),
5269 OEP_ONLY_CONST))
5270 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5271 type,
5272 fold_convert (type, arg1),
5273 arg2));
5274 break;
5275 case NE_EXPR:
5276 break;
5277 default:
5278 gcc_unreachable ();
5279 }
5280
5281 return NULL_TREE;
5282 }
5283
5284
5285 \f
5286 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5287 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5288 #endif
5289
5290 /* EXP is some logical combination of boolean tests. See if we can
5291 merge it into some range test. Return the new tree if so. */
5292
5293 static tree
5294 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5295 {
5296 int or_op = (code == TRUTH_ORIF_EXPR
5297 || code == TRUTH_OR_EXPR);
5298 int in0_p, in1_p, in_p;
5299 tree low0, low1, low, high0, high1, high;
5300 bool strict_overflow_p = false;
5301 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5302 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5303 tree tem;
5304 const char * const warnmsg = G_("assuming signed overflow does not occur "
5305 "when simplifying range test");
5306
5307 /* If this is an OR operation, invert both sides; we will invert
5308 again at the end. */
5309 if (or_op)
5310 in0_p = ! in0_p, in1_p = ! in1_p;
5311
5312 /* If both expressions are the same, if we can merge the ranges, and we
5313 can build the range test, return it or it inverted. If one of the
5314 ranges is always true or always false, consider it to be the same
5315 expression as the other. */
5316 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5317 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5318 in1_p, low1, high1)
5319 && 0 != (tem = (build_range_check (type,
5320 lhs != 0 ? lhs
5321 : rhs != 0 ? rhs : integer_zero_node,
5322 in_p, low, high))))
5323 {
5324 if (strict_overflow_p)
5325 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5326 return or_op ? invert_truthvalue (tem) : tem;
5327 }
5328
5329 /* On machines where the branch cost is expensive, if this is a
5330 short-circuited branch and the underlying object on both sides
5331 is the same, make a non-short-circuit operation. */
5332 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5333 && lhs != 0 && rhs != 0
5334 && (code == TRUTH_ANDIF_EXPR
5335 || code == TRUTH_ORIF_EXPR)
5336 && operand_equal_p (lhs, rhs, 0))
5337 {
5338 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5339 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5340 which cases we can't do this. */
5341 if (simple_operand_p (lhs))
5342 return build2 (code == TRUTH_ANDIF_EXPR
5343 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5344 type, op0, op1);
5345
5346 else if (lang_hooks.decls.global_bindings_p () == 0
5347 && ! CONTAINS_PLACEHOLDER_P (lhs))
5348 {
5349 tree common = save_expr (lhs);
5350
5351 if (0 != (lhs = build_range_check (type, common,
5352 or_op ? ! in0_p : in0_p,
5353 low0, high0))
5354 && (0 != (rhs = build_range_check (type, common,
5355 or_op ? ! in1_p : in1_p,
5356 low1, high1))))
5357 {
5358 if (strict_overflow_p)
5359 fold_overflow_warning (warnmsg,
5360 WARN_STRICT_OVERFLOW_COMPARISON);
5361 return build2 (code == TRUTH_ANDIF_EXPR
5362 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5363 type, lhs, rhs);
5364 }
5365 }
5366 }
5367
5368 return 0;
5369 }
5370 \f
5371 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5372 bit value. Arrange things so the extra bits will be set to zero if and
5373 only if C is signed-extended to its full width. If MASK is nonzero,
5374 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5375
5376 static tree
5377 unextend (tree c, int p, int unsignedp, tree mask)
5378 {
5379 tree type = TREE_TYPE (c);
5380 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5381 tree temp;
5382
5383 if (p == modesize || unsignedp)
5384 return c;
5385
5386 /* We work by getting just the sign bit into the low-order bit, then
5387 into the high-order bit, then sign-extend. We then XOR that value
5388 with C. */
5389 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5390 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5391
5392 /* We must use a signed type in order to get an arithmetic right shift.
5393 However, we must also avoid introducing accidental overflows, so that
5394 a subsequent call to integer_zerop will work. Hence we must
5395 do the type conversion here. At this point, the constant is either
5396 zero or one, and the conversion to a signed type can never overflow.
5397 We could get an overflow if this conversion is done anywhere else. */
5398 if (TYPE_UNSIGNED (type))
5399 temp = fold_convert (signed_type_for (type), temp);
5400
5401 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5402 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5403 if (mask != 0)
5404 temp = const_binop (BIT_AND_EXPR, temp,
5405 fold_convert (TREE_TYPE (c), mask), 0);
5406 /* If necessary, convert the type back to match the type of C. */
5407 if (TYPE_UNSIGNED (type))
5408 temp = fold_convert (type, temp);
5409
5410 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5411 }
5412 \f
5413 /* Find ways of folding logical expressions of LHS and RHS:
5414 Try to merge two comparisons to the same innermost item.
5415 Look for range tests like "ch >= '0' && ch <= '9'".
5416 Look for combinations of simple terms on machines with expensive branches
5417 and evaluate the RHS unconditionally.
5418
5419 For example, if we have p->a == 2 && p->b == 4 and we can make an
5420 object large enough to span both A and B, we can do this with a comparison
5421 against the object ANDed with the a mask.
5422
5423 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5424 operations to do this with one comparison.
5425
5426 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5427 function and the one above.
5428
5429 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5430 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5431
5432 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5433 two operands.
5434
5435 We return the simplified tree or 0 if no optimization is possible. */
5436
5437 static tree
5438 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5439 {
5440 /* If this is the "or" of two comparisons, we can do something if
5441 the comparisons are NE_EXPR. If this is the "and", we can do something
5442 if the comparisons are EQ_EXPR. I.e.,
5443 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5444
5445 WANTED_CODE is this operation code. For single bit fields, we can
5446 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5447 comparison for one-bit fields. */
5448
5449 enum tree_code wanted_code;
5450 enum tree_code lcode, rcode;
5451 tree ll_arg, lr_arg, rl_arg, rr_arg;
5452 tree ll_inner, lr_inner, rl_inner, rr_inner;
5453 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5454 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5455 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5456 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5457 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5458 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5459 enum machine_mode lnmode, rnmode;
5460 tree ll_mask, lr_mask, rl_mask, rr_mask;
5461 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5462 tree l_const, r_const;
5463 tree lntype, rntype, result;
5464 int first_bit, end_bit;
5465 int volatilep;
5466 tree orig_lhs = lhs, orig_rhs = rhs;
5467 enum tree_code orig_code = code;
5468
5469 /* Start by getting the comparison codes. Fail if anything is volatile.
5470 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5471 it were surrounded with a NE_EXPR. */
5472
5473 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5474 return 0;
5475
5476 lcode = TREE_CODE (lhs);
5477 rcode = TREE_CODE (rhs);
5478
5479 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5480 {
5481 lhs = build2 (NE_EXPR, truth_type, lhs,
5482 build_int_cst (TREE_TYPE (lhs), 0));
5483 lcode = NE_EXPR;
5484 }
5485
5486 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5487 {
5488 rhs = build2 (NE_EXPR, truth_type, rhs,
5489 build_int_cst (TREE_TYPE (rhs), 0));
5490 rcode = NE_EXPR;
5491 }
5492
5493 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5494 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5495 return 0;
5496
5497 ll_arg = TREE_OPERAND (lhs, 0);
5498 lr_arg = TREE_OPERAND (lhs, 1);
5499 rl_arg = TREE_OPERAND (rhs, 0);
5500 rr_arg = TREE_OPERAND (rhs, 1);
5501
5502 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5503 if (simple_operand_p (ll_arg)
5504 && simple_operand_p (lr_arg))
5505 {
5506 tree result;
5507 if (operand_equal_p (ll_arg, rl_arg, 0)
5508 && operand_equal_p (lr_arg, rr_arg, 0))
5509 {
5510 result = combine_comparisons (code, lcode, rcode,
5511 truth_type, ll_arg, lr_arg);
5512 if (result)
5513 return result;
5514 }
5515 else if (operand_equal_p (ll_arg, rr_arg, 0)
5516 && operand_equal_p (lr_arg, rl_arg, 0))
5517 {
5518 result = combine_comparisons (code, lcode,
5519 swap_tree_comparison (rcode),
5520 truth_type, ll_arg, lr_arg);
5521 if (result)
5522 return result;
5523 }
5524 }
5525
5526 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5527 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5528
5529 /* If the RHS can be evaluated unconditionally and its operands are
5530 simple, it wins to evaluate the RHS unconditionally on machines
5531 with expensive branches. In this case, this isn't a comparison
5532 that can be merged. Avoid doing this if the RHS is a floating-point
5533 comparison since those can trap. */
5534
5535 if (BRANCH_COST >= 2
5536 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5537 && simple_operand_p (rl_arg)
5538 && simple_operand_p (rr_arg))
5539 {
5540 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5541 if (code == TRUTH_OR_EXPR
5542 && lcode == NE_EXPR && integer_zerop (lr_arg)
5543 && rcode == NE_EXPR && integer_zerop (rr_arg)
5544 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5545 return build2 (NE_EXPR, truth_type,
5546 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5547 ll_arg, rl_arg),
5548 build_int_cst (TREE_TYPE (ll_arg), 0));
5549
5550 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5551 if (code == TRUTH_AND_EXPR
5552 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5553 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5554 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5555 return build2 (EQ_EXPR, truth_type,
5556 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5557 ll_arg, rl_arg),
5558 build_int_cst (TREE_TYPE (ll_arg), 0));
5559
5560 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5561 {
5562 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5563 return build2 (code, truth_type, lhs, rhs);
5564 return NULL_TREE;
5565 }
5566 }
5567
5568 /* See if the comparisons can be merged. Then get all the parameters for
5569 each side. */
5570
5571 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5572 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5573 return 0;
5574
5575 volatilep = 0;
5576 ll_inner = decode_field_reference (ll_arg,
5577 &ll_bitsize, &ll_bitpos, &ll_mode,
5578 &ll_unsignedp, &volatilep, &ll_mask,
5579 &ll_and_mask);
5580 lr_inner = decode_field_reference (lr_arg,
5581 &lr_bitsize, &lr_bitpos, &lr_mode,
5582 &lr_unsignedp, &volatilep, &lr_mask,
5583 &lr_and_mask);
5584 rl_inner = decode_field_reference (rl_arg,
5585 &rl_bitsize, &rl_bitpos, &rl_mode,
5586 &rl_unsignedp, &volatilep, &rl_mask,
5587 &rl_and_mask);
5588 rr_inner = decode_field_reference (rr_arg,
5589 &rr_bitsize, &rr_bitpos, &rr_mode,
5590 &rr_unsignedp, &volatilep, &rr_mask,
5591 &rr_and_mask);
5592
5593 /* It must be true that the inner operation on the lhs of each
5594 comparison must be the same if we are to be able to do anything.
5595 Then see if we have constants. If not, the same must be true for
5596 the rhs's. */
5597 if (volatilep || ll_inner == 0 || rl_inner == 0
5598 || ! operand_equal_p (ll_inner, rl_inner, 0))
5599 return 0;
5600
5601 if (TREE_CODE (lr_arg) == INTEGER_CST
5602 && TREE_CODE (rr_arg) == INTEGER_CST)
5603 l_const = lr_arg, r_const = rr_arg;
5604 else if (lr_inner == 0 || rr_inner == 0
5605 || ! operand_equal_p (lr_inner, rr_inner, 0))
5606 return 0;
5607 else
5608 l_const = r_const = 0;
5609
5610 /* If either comparison code is not correct for our logical operation,
5611 fail. However, we can convert a one-bit comparison against zero into
5612 the opposite comparison against that bit being set in the field. */
5613
5614 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5615 if (lcode != wanted_code)
5616 {
5617 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5618 {
5619 /* Make the left operand unsigned, since we are only interested
5620 in the value of one bit. Otherwise we are doing the wrong
5621 thing below. */
5622 ll_unsignedp = 1;
5623 l_const = ll_mask;
5624 }
5625 else
5626 return 0;
5627 }
5628
5629 /* This is analogous to the code for l_const above. */
5630 if (rcode != wanted_code)
5631 {
5632 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5633 {
5634 rl_unsignedp = 1;
5635 r_const = rl_mask;
5636 }
5637 else
5638 return 0;
5639 }
5640
5641 /* See if we can find a mode that contains both fields being compared on
5642 the left. If we can't, fail. Otherwise, update all constants and masks
5643 to be relative to a field of that size. */
5644 first_bit = MIN (ll_bitpos, rl_bitpos);
5645 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5646 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5647 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5648 volatilep);
5649 if (lnmode == VOIDmode)
5650 return 0;
5651
5652 lnbitsize = GET_MODE_BITSIZE (lnmode);
5653 lnbitpos = first_bit & ~ (lnbitsize - 1);
5654 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5655 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5656
5657 if (BYTES_BIG_ENDIAN)
5658 {
5659 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5660 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5661 }
5662
5663 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5664 size_int (xll_bitpos), 0);
5665 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5666 size_int (xrl_bitpos), 0);
5667
5668 if (l_const)
5669 {
5670 l_const = fold_convert (lntype, l_const);
5671 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5672 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5673 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5674 fold_build1 (BIT_NOT_EXPR,
5675 lntype, ll_mask),
5676 0)))
5677 {
5678 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5679
5680 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5681 }
5682 }
5683 if (r_const)
5684 {
5685 r_const = fold_convert (lntype, r_const);
5686 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5687 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5688 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5689 fold_build1 (BIT_NOT_EXPR,
5690 lntype, rl_mask),
5691 0)))
5692 {
5693 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5694
5695 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5696 }
5697 }
5698
5699 /* If the right sides are not constant, do the same for it. Also,
5700 disallow this optimization if a size or signedness mismatch occurs
5701 between the left and right sides. */
5702 if (l_const == 0)
5703 {
5704 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5705 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5706 /* Make sure the two fields on the right
5707 correspond to the left without being swapped. */
5708 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5709 return 0;
5710
5711 first_bit = MIN (lr_bitpos, rr_bitpos);
5712 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5713 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5714 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5715 volatilep);
5716 if (rnmode == VOIDmode)
5717 return 0;
5718
5719 rnbitsize = GET_MODE_BITSIZE (rnmode);
5720 rnbitpos = first_bit & ~ (rnbitsize - 1);
5721 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5722 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5723
5724 if (BYTES_BIG_ENDIAN)
5725 {
5726 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5727 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5728 }
5729
5730 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5731 size_int (xlr_bitpos), 0);
5732 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5733 size_int (xrr_bitpos), 0);
5734
5735 /* Make a mask that corresponds to both fields being compared.
5736 Do this for both items being compared. If the operands are the
5737 same size and the bits being compared are in the same position
5738 then we can do this by masking both and comparing the masked
5739 results. */
5740 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5741 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5742 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5743 {
5744 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5745 ll_unsignedp || rl_unsignedp);
5746 if (! all_ones_mask_p (ll_mask, lnbitsize))
5747 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5748
5749 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5750 lr_unsignedp || rr_unsignedp);
5751 if (! all_ones_mask_p (lr_mask, rnbitsize))
5752 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5753
5754 return build2 (wanted_code, truth_type, lhs, rhs);
5755 }
5756
5757 /* There is still another way we can do something: If both pairs of
5758 fields being compared are adjacent, we may be able to make a wider
5759 field containing them both.
5760
5761 Note that we still must mask the lhs/rhs expressions. Furthermore,
5762 the mask must be shifted to account for the shift done by
5763 make_bit_field_ref. */
5764 if ((ll_bitsize + ll_bitpos == rl_bitpos
5765 && lr_bitsize + lr_bitpos == rr_bitpos)
5766 || (ll_bitpos == rl_bitpos + rl_bitsize
5767 && lr_bitpos == rr_bitpos + rr_bitsize))
5768 {
5769 tree type;
5770
5771 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5772 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5773 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5774 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5775
5776 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5777 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5778 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5779 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5780
5781 /* Convert to the smaller type before masking out unwanted bits. */
5782 type = lntype;
5783 if (lntype != rntype)
5784 {
5785 if (lnbitsize > rnbitsize)
5786 {
5787 lhs = fold_convert (rntype, lhs);
5788 ll_mask = fold_convert (rntype, ll_mask);
5789 type = rntype;
5790 }
5791 else if (lnbitsize < rnbitsize)
5792 {
5793 rhs = fold_convert (lntype, rhs);
5794 lr_mask = fold_convert (lntype, lr_mask);
5795 type = lntype;
5796 }
5797 }
5798
5799 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5800 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5801
5802 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5803 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5804
5805 return build2 (wanted_code, truth_type, lhs, rhs);
5806 }
5807
5808 return 0;
5809 }
5810
5811 /* Handle the case of comparisons with constants. If there is something in
5812 common between the masks, those bits of the constants must be the same.
5813 If not, the condition is always false. Test for this to avoid generating
5814 incorrect code below. */
5815 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5816 if (! integer_zerop (result)
5817 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5818 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5819 {
5820 if (wanted_code == NE_EXPR)
5821 {
5822 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5823 return constant_boolean_node (true, truth_type);
5824 }
5825 else
5826 {
5827 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5828 return constant_boolean_node (false, truth_type);
5829 }
5830 }
5831
5832 /* Construct the expression we will return. First get the component
5833 reference we will make. Unless the mask is all ones the width of
5834 that field, perform the mask operation. Then compare with the
5835 merged constant. */
5836 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5837 ll_unsignedp || rl_unsignedp);
5838
5839 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5840 if (! all_ones_mask_p (ll_mask, lnbitsize))
5841 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5842
5843 return build2 (wanted_code, truth_type, result,
5844 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5845 }
5846 \f
5847 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5848 constant. */
5849
5850 static tree
5851 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5852 {
5853 tree arg0 = op0;
5854 enum tree_code op_code;
5855 tree comp_const = op1;
5856 tree minmax_const;
5857 int consts_equal, consts_lt;
5858 tree inner;
5859
5860 STRIP_SIGN_NOPS (arg0);
5861
5862 op_code = TREE_CODE (arg0);
5863 minmax_const = TREE_OPERAND (arg0, 1);
5864 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5865 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5866 inner = TREE_OPERAND (arg0, 0);
5867
5868 /* If something does not permit us to optimize, return the original tree. */
5869 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5870 || TREE_CODE (comp_const) != INTEGER_CST
5871 || TREE_OVERFLOW (comp_const)
5872 || TREE_CODE (minmax_const) != INTEGER_CST
5873 || TREE_OVERFLOW (minmax_const))
5874 return NULL_TREE;
5875
5876 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5877 and GT_EXPR, doing the rest with recursive calls using logical
5878 simplifications. */
5879 switch (code)
5880 {
5881 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5882 {
5883 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5884 type, op0, op1);
5885 if (tem)
5886 return invert_truthvalue (tem);
5887 return NULL_TREE;
5888 }
5889
5890 case GE_EXPR:
5891 return
5892 fold_build2 (TRUTH_ORIF_EXPR, type,
5893 optimize_minmax_comparison
5894 (EQ_EXPR, type, arg0, comp_const),
5895 optimize_minmax_comparison
5896 (GT_EXPR, type, arg0, comp_const));
5897
5898 case EQ_EXPR:
5899 if (op_code == MAX_EXPR && consts_equal)
5900 /* MAX (X, 0) == 0 -> X <= 0 */
5901 return fold_build2 (LE_EXPR, type, inner, comp_const);
5902
5903 else if (op_code == MAX_EXPR && consts_lt)
5904 /* MAX (X, 0) == 5 -> X == 5 */
5905 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5906
5907 else if (op_code == MAX_EXPR)
5908 /* MAX (X, 0) == -1 -> false */
5909 return omit_one_operand (type, integer_zero_node, inner);
5910
5911 else if (consts_equal)
5912 /* MIN (X, 0) == 0 -> X >= 0 */
5913 return fold_build2 (GE_EXPR, type, inner, comp_const);
5914
5915 else if (consts_lt)
5916 /* MIN (X, 0) == 5 -> false */
5917 return omit_one_operand (type, integer_zero_node, inner);
5918
5919 else
5920 /* MIN (X, 0) == -1 -> X == -1 */
5921 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5922
5923 case GT_EXPR:
5924 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5925 /* MAX (X, 0) > 0 -> X > 0
5926 MAX (X, 0) > 5 -> X > 5 */
5927 return fold_build2 (GT_EXPR, type, inner, comp_const);
5928
5929 else if (op_code == MAX_EXPR)
5930 /* MAX (X, 0) > -1 -> true */
5931 return omit_one_operand (type, integer_one_node, inner);
5932
5933 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5934 /* MIN (X, 0) > 0 -> false
5935 MIN (X, 0) > 5 -> false */
5936 return omit_one_operand (type, integer_zero_node, inner);
5937
5938 else
5939 /* MIN (X, 0) > -1 -> X > -1 */
5940 return fold_build2 (GT_EXPR, type, inner, comp_const);
5941
5942 default:
5943 return NULL_TREE;
5944 }
5945 }
5946 \f
5947 /* T is an integer expression that is being multiplied, divided, or taken a
5948 modulus (CODE says which and what kind of divide or modulus) by a
5949 constant C. See if we can eliminate that operation by folding it with
5950 other operations already in T. WIDE_TYPE, if non-null, is a type that
5951 should be used for the computation if wider than our type.
5952
5953 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5954 (X * 2) + (Y * 4). We must, however, be assured that either the original
5955 expression would not overflow or that overflow is undefined for the type
5956 in the language in question.
5957
5958 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5959 the machine has a multiply-accumulate insn or that this is part of an
5960 addressing calculation.
5961
5962 If we return a non-null expression, it is an equivalent form of the
5963 original computation, but need not be in the original type.
5964
5965 We set *STRICT_OVERFLOW_P to true if the return values depends on
5966 signed overflow being undefined. Otherwise we do not change
5967 *STRICT_OVERFLOW_P. */
5968
5969 static tree
5970 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5971 bool *strict_overflow_p)
5972 {
5973 /* To avoid exponential search depth, refuse to allow recursion past
5974 three levels. Beyond that (1) it's highly unlikely that we'll find
5975 something interesting and (2) we've probably processed it before
5976 when we built the inner expression. */
5977
5978 static int depth;
5979 tree ret;
5980
5981 if (depth > 3)
5982 return NULL;
5983
5984 depth++;
5985 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5986 depth--;
5987
5988 return ret;
5989 }
5990
5991 static tree
5992 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5993 bool *strict_overflow_p)
5994 {
5995 tree type = TREE_TYPE (t);
5996 enum tree_code tcode = TREE_CODE (t);
5997 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5998 > GET_MODE_SIZE (TYPE_MODE (type)))
5999 ? wide_type : type);
6000 tree t1, t2;
6001 int same_p = tcode == code;
6002 tree op0 = NULL_TREE, op1 = NULL_TREE;
6003 bool sub_strict_overflow_p;
6004
6005 /* Don't deal with constants of zero here; they confuse the code below. */
6006 if (integer_zerop (c))
6007 return NULL_TREE;
6008
6009 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6010 op0 = TREE_OPERAND (t, 0);
6011
6012 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6013 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6014
6015 /* Note that we need not handle conditional operations here since fold
6016 already handles those cases. So just do arithmetic here. */
6017 switch (tcode)
6018 {
6019 case INTEGER_CST:
6020 /* For a constant, we can always simplify if we are a multiply
6021 or (for divide and modulus) if it is a multiple of our constant. */
6022 if (code == MULT_EXPR
6023 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6024 return const_binop (code, fold_convert (ctype, t),
6025 fold_convert (ctype, c), 0);
6026 break;
6027
6028 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
6029 /* If op0 is an expression ... */
6030 if ((COMPARISON_CLASS_P (op0)
6031 || UNARY_CLASS_P (op0)
6032 || BINARY_CLASS_P (op0)
6033 || VL_EXP_CLASS_P (op0)
6034 || EXPRESSION_CLASS_P (op0))
6035 /* ... and is unsigned, and its type is smaller than ctype,
6036 then we cannot pass through as widening. */
6037 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
6038 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6039 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6040 && (GET_MODE_SIZE (TYPE_MODE (ctype))
6041 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
6042 /* ... or this is a truncation (t is narrower than op0),
6043 then we cannot pass through this narrowing. */
6044 || (GET_MODE_SIZE (TYPE_MODE (type))
6045 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
6046 /* ... or signedness changes for division or modulus,
6047 then we cannot pass through this conversion. */
6048 || (code != MULT_EXPR
6049 && (TYPE_UNSIGNED (ctype)
6050 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
6051 break;
6052
6053 /* Pass the constant down and see if we can make a simplification. If
6054 we can, replace this expression with the inner simplification for
6055 possible later conversion to our or some other type. */
6056 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6057 && TREE_CODE (t2) == INTEGER_CST
6058 && !TREE_OVERFLOW (t2)
6059 && (0 != (t1 = extract_muldiv (op0, t2, code,
6060 code == MULT_EXPR
6061 ? ctype : NULL_TREE,
6062 strict_overflow_p))))
6063 return t1;
6064 break;
6065
6066 case ABS_EXPR:
6067 /* If widening the type changes it from signed to unsigned, then we
6068 must avoid building ABS_EXPR itself as unsigned. */
6069 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6070 {
6071 tree cstype = (*signed_type_for) (ctype);
6072 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6073 != 0)
6074 {
6075 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6076 return fold_convert (ctype, t1);
6077 }
6078 break;
6079 }
6080 /* FALLTHROUGH */
6081 case NEGATE_EXPR:
6082 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6083 != 0)
6084 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6085 break;
6086
6087 case MIN_EXPR: case MAX_EXPR:
6088 /* If widening the type changes the signedness, then we can't perform
6089 this optimization as that changes the result. */
6090 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6091 break;
6092
6093 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6094 sub_strict_overflow_p = false;
6095 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6096 &sub_strict_overflow_p)) != 0
6097 && (t2 = extract_muldiv (op1, c, code, wide_type,
6098 &sub_strict_overflow_p)) != 0)
6099 {
6100 if (tree_int_cst_sgn (c) < 0)
6101 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6102 if (sub_strict_overflow_p)
6103 *strict_overflow_p = true;
6104 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6105 fold_convert (ctype, t2));
6106 }
6107 break;
6108
6109 case LSHIFT_EXPR: case RSHIFT_EXPR:
6110 /* If the second operand is constant, this is a multiplication
6111 or floor division, by a power of two, so we can treat it that
6112 way unless the multiplier or divisor overflows. Signed
6113 left-shift overflow is implementation-defined rather than
6114 undefined in C90, so do not convert signed left shift into
6115 multiplication. */
6116 if (TREE_CODE (op1) == INTEGER_CST
6117 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6118 /* const_binop may not detect overflow correctly,
6119 so check for it explicitly here. */
6120 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6121 && TREE_INT_CST_HIGH (op1) == 0
6122 && 0 != (t1 = fold_convert (ctype,
6123 const_binop (LSHIFT_EXPR,
6124 size_one_node,
6125 op1, 0)))
6126 && !TREE_OVERFLOW (t1))
6127 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6128 ? MULT_EXPR : FLOOR_DIV_EXPR,
6129 ctype, fold_convert (ctype, op0), t1),
6130 c, code, wide_type, strict_overflow_p);
6131 break;
6132
6133 case PLUS_EXPR: case MINUS_EXPR:
6134 /* See if we can eliminate the operation on both sides. If we can, we
6135 can return a new PLUS or MINUS. If we can't, the only remaining
6136 cases where we can do anything are if the second operand is a
6137 constant. */
6138 sub_strict_overflow_p = false;
6139 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6140 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6141 if (t1 != 0 && t2 != 0
6142 && (code == MULT_EXPR
6143 /* If not multiplication, we can only do this if both operands
6144 are divisible by c. */
6145 || (multiple_of_p (ctype, op0, c)
6146 && multiple_of_p (ctype, op1, c))))
6147 {
6148 if (sub_strict_overflow_p)
6149 *strict_overflow_p = true;
6150 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6151 fold_convert (ctype, t2));
6152 }
6153
6154 /* If this was a subtraction, negate OP1 and set it to be an addition.
6155 This simplifies the logic below. */
6156 if (tcode == MINUS_EXPR)
6157 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6158
6159 if (TREE_CODE (op1) != INTEGER_CST)
6160 break;
6161
6162 /* If either OP1 or C are negative, this optimization is not safe for
6163 some of the division and remainder types while for others we need
6164 to change the code. */
6165 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6166 {
6167 if (code == CEIL_DIV_EXPR)
6168 code = FLOOR_DIV_EXPR;
6169 else if (code == FLOOR_DIV_EXPR)
6170 code = CEIL_DIV_EXPR;
6171 else if (code != MULT_EXPR
6172 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6173 break;
6174 }
6175
6176 /* If it's a multiply or a division/modulus operation of a multiple
6177 of our constant, do the operation and verify it doesn't overflow. */
6178 if (code == MULT_EXPR
6179 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6180 {
6181 op1 = const_binop (code, fold_convert (ctype, op1),
6182 fold_convert (ctype, c), 0);
6183 /* We allow the constant to overflow with wrapping semantics. */
6184 if (op1 == 0
6185 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6186 break;
6187 }
6188 else
6189 break;
6190
6191 /* If we have an unsigned type is not a sizetype, we cannot widen
6192 the operation since it will change the result if the original
6193 computation overflowed. */
6194 if (TYPE_UNSIGNED (ctype)
6195 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6196 && ctype != type)
6197 break;
6198
6199 /* If we were able to eliminate our operation from the first side,
6200 apply our operation to the second side and reform the PLUS. */
6201 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6202 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6203
6204 /* The last case is if we are a multiply. In that case, we can
6205 apply the distributive law to commute the multiply and addition
6206 if the multiplication of the constants doesn't overflow. */
6207 if (code == MULT_EXPR)
6208 return fold_build2 (tcode, ctype,
6209 fold_build2 (code, ctype,
6210 fold_convert (ctype, op0),
6211 fold_convert (ctype, c)),
6212 op1);
6213
6214 break;
6215
6216 case MULT_EXPR:
6217 /* We have a special case here if we are doing something like
6218 (C * 8) % 4 since we know that's zero. */
6219 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6220 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6221 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6222 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6223 return omit_one_operand (type, integer_zero_node, op0);
6224
6225 /* ... fall through ... */
6226
6227 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6228 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6229 /* If we can extract our operation from the LHS, do so and return a
6230 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6231 do something only if the second operand is a constant. */
6232 if (same_p
6233 && (t1 = extract_muldiv (op0, c, code, wide_type,
6234 strict_overflow_p)) != 0)
6235 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6236 fold_convert (ctype, op1));
6237 else if (tcode == MULT_EXPR && code == MULT_EXPR
6238 && (t1 = extract_muldiv (op1, c, code, wide_type,
6239 strict_overflow_p)) != 0)
6240 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6241 fold_convert (ctype, t1));
6242 else if (TREE_CODE (op1) != INTEGER_CST)
6243 return 0;
6244
6245 /* If these are the same operation types, we can associate them
6246 assuming no overflow. */
6247 if (tcode == code
6248 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
6249 fold_convert (ctype, c), 0))
6250 && !TREE_OVERFLOW (t1))
6251 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6252
6253 /* If these operations "cancel" each other, we have the main
6254 optimizations of this pass, which occur when either constant is a
6255 multiple of the other, in which case we replace this with either an
6256 operation or CODE or TCODE.
6257
6258 If we have an unsigned type that is not a sizetype, we cannot do
6259 this since it will change the result if the original computation
6260 overflowed. */
6261 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6262 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6263 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6264 || (tcode == MULT_EXPR
6265 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6266 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
6267 {
6268 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6269 {
6270 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6271 *strict_overflow_p = true;
6272 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6273 fold_convert (ctype,
6274 const_binop (TRUNC_DIV_EXPR,
6275 op1, c, 0)));
6276 }
6277 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6278 {
6279 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6280 *strict_overflow_p = true;
6281 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6282 fold_convert (ctype,
6283 const_binop (TRUNC_DIV_EXPR,
6284 c, op1, 0)));
6285 }
6286 }
6287 break;
6288
6289 default:
6290 break;
6291 }
6292
6293 return 0;
6294 }
6295 \f
6296 /* Return a node which has the indicated constant VALUE (either 0 or
6297 1), and is of the indicated TYPE. */
6298
6299 tree
6300 constant_boolean_node (int value, tree type)
6301 {
6302 if (type == integer_type_node)
6303 return value ? integer_one_node : integer_zero_node;
6304 else if (type == boolean_type_node)
6305 return value ? boolean_true_node : boolean_false_node;
6306 else
6307 return build_int_cst (type, value);
6308 }
6309
6310
6311 /* Return true if expr looks like an ARRAY_REF and set base and
6312 offset to the appropriate trees. If there is no offset,
6313 offset is set to NULL_TREE. Base will be canonicalized to
6314 something you can get the element type from using
6315 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
6316 in bytes to the base in sizetype. */
6317
6318 static bool
6319 extract_array_ref (tree expr, tree *base, tree *offset)
6320 {
6321 /* One canonical form is a PLUS_EXPR with the first
6322 argument being an ADDR_EXPR with a possible NOP_EXPR
6323 attached. */
6324 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
6325 {
6326 tree op0 = TREE_OPERAND (expr, 0);
6327 tree inner_base, dummy1;
6328 /* Strip NOP_EXPRs here because the C frontends and/or
6329 folders present us (int *)&x.a p+ 4 possibly. */
6330 STRIP_NOPS (op0);
6331 if (extract_array_ref (op0, &inner_base, &dummy1))
6332 {
6333 *base = inner_base;
6334 *offset = fold_convert (sizetype, TREE_OPERAND (expr, 1));
6335 if (dummy1 != NULL_TREE)
6336 *offset = fold_build2 (PLUS_EXPR, sizetype,
6337 dummy1, *offset);
6338 return true;
6339 }
6340 }
6341 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
6342 which we transform into an ADDR_EXPR with appropriate
6343 offset. For other arguments to the ADDR_EXPR we assume
6344 zero offset and as such do not care about the ADDR_EXPR
6345 type and strip possible nops from it. */
6346 else if (TREE_CODE (expr) == ADDR_EXPR)
6347 {
6348 tree op0 = TREE_OPERAND (expr, 0);
6349 if (TREE_CODE (op0) == ARRAY_REF)
6350 {
6351 tree idx = TREE_OPERAND (op0, 1);
6352 *base = TREE_OPERAND (op0, 0);
6353 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6354 array_ref_element_size (op0));
6355 *offset = fold_convert (sizetype, *offset);
6356 }
6357 else
6358 {
6359 /* Handle array-to-pointer decay as &a. */
6360 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6361 *base = TREE_OPERAND (expr, 0);
6362 else
6363 *base = expr;
6364 *offset = NULL_TREE;
6365 }
6366 return true;
6367 }
6368 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
6369 else if (SSA_VAR_P (expr)
6370 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
6371 {
6372 *base = expr;
6373 *offset = NULL_TREE;
6374 return true;
6375 }
6376
6377 return false;
6378 }
6379
6380
6381 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6382 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6383 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6384 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6385 COND is the first argument to CODE; otherwise (as in the example
6386 given here), it is the second argument. TYPE is the type of the
6387 original expression. Return NULL_TREE if no simplification is
6388 possible. */
6389
6390 static tree
6391 fold_binary_op_with_conditional_arg (enum tree_code code,
6392 tree type, tree op0, tree op1,
6393 tree cond, tree arg, int cond_first_p)
6394 {
6395 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6396 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6397 tree test, true_value, false_value;
6398 tree lhs = NULL_TREE;
6399 tree rhs = NULL_TREE;
6400
6401 /* This transformation is only worthwhile if we don't have to wrap
6402 arg in a SAVE_EXPR, and the operation can be simplified on at least
6403 one of the branches once its pushed inside the COND_EXPR. */
6404 if (!TREE_CONSTANT (arg))
6405 return NULL_TREE;
6406
6407 if (TREE_CODE (cond) == COND_EXPR)
6408 {
6409 test = TREE_OPERAND (cond, 0);
6410 true_value = TREE_OPERAND (cond, 1);
6411 false_value = TREE_OPERAND (cond, 2);
6412 /* If this operand throws an expression, then it does not make
6413 sense to try to perform a logical or arithmetic operation
6414 involving it. */
6415 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6416 lhs = true_value;
6417 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6418 rhs = false_value;
6419 }
6420 else
6421 {
6422 tree testtype = TREE_TYPE (cond);
6423 test = cond;
6424 true_value = constant_boolean_node (true, testtype);
6425 false_value = constant_boolean_node (false, testtype);
6426 }
6427
6428 arg = fold_convert (arg_type, arg);
6429 if (lhs == 0)
6430 {
6431 true_value = fold_convert (cond_type, true_value);
6432 if (cond_first_p)
6433 lhs = fold_build2 (code, type, true_value, arg);
6434 else
6435 lhs = fold_build2 (code, type, arg, true_value);
6436 }
6437 if (rhs == 0)
6438 {
6439 false_value = fold_convert (cond_type, false_value);
6440 if (cond_first_p)
6441 rhs = fold_build2 (code, type, false_value, arg);
6442 else
6443 rhs = fold_build2 (code, type, arg, false_value);
6444 }
6445
6446 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6447 return fold_convert (type, test);
6448 }
6449
6450 \f
6451 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6452
6453 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6454 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6455 ADDEND is the same as X.
6456
6457 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6458 and finite. The problematic cases are when X is zero, and its mode
6459 has signed zeros. In the case of rounding towards -infinity,
6460 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6461 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6462
6463 static bool
6464 fold_real_zero_addition_p (tree type, tree addend, int negate)
6465 {
6466 if (!real_zerop (addend))
6467 return false;
6468
6469 /* Don't allow the fold with -fsignaling-nans. */
6470 if (HONOR_SNANS (TYPE_MODE (type)))
6471 return false;
6472
6473 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6474 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6475 return true;
6476
6477 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6478 if (TREE_CODE (addend) == REAL_CST
6479 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6480 negate = !negate;
6481
6482 /* The mode has signed zeros, and we have to honor their sign.
6483 In this situation, there is only one case we can return true for.
6484 X - 0 is the same as X unless rounding towards -infinity is
6485 supported. */
6486 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6487 }
6488
6489 /* Subroutine of fold() that checks comparisons of built-in math
6490 functions against real constants.
6491
6492 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6493 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6494 is the type of the result and ARG0 and ARG1 are the operands of the
6495 comparison. ARG1 must be a TREE_REAL_CST.
6496
6497 The function returns the constant folded tree if a simplification
6498 can be made, and NULL_TREE otherwise. */
6499
6500 static tree
6501 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6502 tree type, tree arg0, tree arg1)
6503 {
6504 REAL_VALUE_TYPE c;
6505
6506 if (BUILTIN_SQRT_P (fcode))
6507 {
6508 tree arg = CALL_EXPR_ARG (arg0, 0);
6509 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6510
6511 c = TREE_REAL_CST (arg1);
6512 if (REAL_VALUE_NEGATIVE (c))
6513 {
6514 /* sqrt(x) < y is always false, if y is negative. */
6515 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6516 return omit_one_operand (type, integer_zero_node, arg);
6517
6518 /* sqrt(x) > y is always true, if y is negative and we
6519 don't care about NaNs, i.e. negative values of x. */
6520 if (code == NE_EXPR || !HONOR_NANS (mode))
6521 return omit_one_operand (type, integer_one_node, arg);
6522
6523 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6524 return fold_build2 (GE_EXPR, type, arg,
6525 build_real (TREE_TYPE (arg), dconst0));
6526 }
6527 else if (code == GT_EXPR || code == GE_EXPR)
6528 {
6529 REAL_VALUE_TYPE c2;
6530
6531 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6532 real_convert (&c2, mode, &c2);
6533
6534 if (REAL_VALUE_ISINF (c2))
6535 {
6536 /* sqrt(x) > y is x == +Inf, when y is very large. */
6537 if (HONOR_INFINITIES (mode))
6538 return fold_build2 (EQ_EXPR, type, arg,
6539 build_real (TREE_TYPE (arg), c2));
6540
6541 /* sqrt(x) > y is always false, when y is very large
6542 and we don't care about infinities. */
6543 return omit_one_operand (type, integer_zero_node, arg);
6544 }
6545
6546 /* sqrt(x) > c is the same as x > c*c. */
6547 return fold_build2 (code, type, arg,
6548 build_real (TREE_TYPE (arg), c2));
6549 }
6550 else if (code == LT_EXPR || code == LE_EXPR)
6551 {
6552 REAL_VALUE_TYPE c2;
6553
6554 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6555 real_convert (&c2, mode, &c2);
6556
6557 if (REAL_VALUE_ISINF (c2))
6558 {
6559 /* sqrt(x) < y is always true, when y is a very large
6560 value and we don't care about NaNs or Infinities. */
6561 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6562 return omit_one_operand (type, integer_one_node, arg);
6563
6564 /* sqrt(x) < y is x != +Inf when y is very large and we
6565 don't care about NaNs. */
6566 if (! HONOR_NANS (mode))
6567 return fold_build2 (NE_EXPR, type, arg,
6568 build_real (TREE_TYPE (arg), c2));
6569
6570 /* sqrt(x) < y is x >= 0 when y is very large and we
6571 don't care about Infinities. */
6572 if (! HONOR_INFINITIES (mode))
6573 return fold_build2 (GE_EXPR, type, arg,
6574 build_real (TREE_TYPE (arg), dconst0));
6575
6576 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6577 if (lang_hooks.decls.global_bindings_p () != 0
6578 || CONTAINS_PLACEHOLDER_P (arg))
6579 return NULL_TREE;
6580
6581 arg = save_expr (arg);
6582 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6583 fold_build2 (GE_EXPR, type, arg,
6584 build_real (TREE_TYPE (arg),
6585 dconst0)),
6586 fold_build2 (NE_EXPR, type, arg,
6587 build_real (TREE_TYPE (arg),
6588 c2)));
6589 }
6590
6591 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6592 if (! HONOR_NANS (mode))
6593 return fold_build2 (code, type, arg,
6594 build_real (TREE_TYPE (arg), c2));
6595
6596 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6597 if (lang_hooks.decls.global_bindings_p () == 0
6598 && ! CONTAINS_PLACEHOLDER_P (arg))
6599 {
6600 arg = save_expr (arg);
6601 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6602 fold_build2 (GE_EXPR, type, arg,
6603 build_real (TREE_TYPE (arg),
6604 dconst0)),
6605 fold_build2 (code, type, arg,
6606 build_real (TREE_TYPE (arg),
6607 c2)));
6608 }
6609 }
6610 }
6611
6612 return NULL_TREE;
6613 }
6614
6615 /* Subroutine of fold() that optimizes comparisons against Infinities,
6616 either +Inf or -Inf.
6617
6618 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6619 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6620 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6621
6622 The function returns the constant folded tree if a simplification
6623 can be made, and NULL_TREE otherwise. */
6624
6625 static tree
6626 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6627 {
6628 enum machine_mode mode;
6629 REAL_VALUE_TYPE max;
6630 tree temp;
6631 bool neg;
6632
6633 mode = TYPE_MODE (TREE_TYPE (arg0));
6634
6635 /* For negative infinity swap the sense of the comparison. */
6636 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6637 if (neg)
6638 code = swap_tree_comparison (code);
6639
6640 switch (code)
6641 {
6642 case GT_EXPR:
6643 /* x > +Inf is always false, if with ignore sNANs. */
6644 if (HONOR_SNANS (mode))
6645 return NULL_TREE;
6646 return omit_one_operand (type, integer_zero_node, arg0);
6647
6648 case LE_EXPR:
6649 /* x <= +Inf is always true, if we don't case about NaNs. */
6650 if (! HONOR_NANS (mode))
6651 return omit_one_operand (type, integer_one_node, arg0);
6652
6653 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6654 if (lang_hooks.decls.global_bindings_p () == 0
6655 && ! CONTAINS_PLACEHOLDER_P (arg0))
6656 {
6657 arg0 = save_expr (arg0);
6658 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6659 }
6660 break;
6661
6662 case EQ_EXPR:
6663 case GE_EXPR:
6664 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6665 real_maxval (&max, neg, mode);
6666 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6667 arg0, build_real (TREE_TYPE (arg0), max));
6668
6669 case LT_EXPR:
6670 /* x < +Inf is always equal to x <= DBL_MAX. */
6671 real_maxval (&max, neg, mode);
6672 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6673 arg0, build_real (TREE_TYPE (arg0), max));
6674
6675 case NE_EXPR:
6676 /* x != +Inf is always equal to !(x > DBL_MAX). */
6677 real_maxval (&max, neg, mode);
6678 if (! HONOR_NANS (mode))
6679 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6680 arg0, build_real (TREE_TYPE (arg0), max));
6681
6682 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6683 arg0, build_real (TREE_TYPE (arg0), max));
6684 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6685
6686 default:
6687 break;
6688 }
6689
6690 return NULL_TREE;
6691 }
6692
6693 /* Subroutine of fold() that optimizes comparisons of a division by
6694 a nonzero integer constant against an integer constant, i.e.
6695 X/C1 op C2.
6696
6697 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6698 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6699 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6700
6701 The function returns the constant folded tree if a simplification
6702 can be made, and NULL_TREE otherwise. */
6703
6704 static tree
6705 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6706 {
6707 tree prod, tmp, hi, lo;
6708 tree arg00 = TREE_OPERAND (arg0, 0);
6709 tree arg01 = TREE_OPERAND (arg0, 1);
6710 unsigned HOST_WIDE_INT lpart;
6711 HOST_WIDE_INT hpart;
6712 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6713 bool neg_overflow;
6714 int overflow;
6715
6716 /* We have to do this the hard way to detect unsigned overflow.
6717 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6718 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6719 TREE_INT_CST_HIGH (arg01),
6720 TREE_INT_CST_LOW (arg1),
6721 TREE_INT_CST_HIGH (arg1),
6722 &lpart, &hpart, unsigned_p);
6723 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6724 -1, overflow);
6725 neg_overflow = false;
6726
6727 if (unsigned_p)
6728 {
6729 tmp = int_const_binop (MINUS_EXPR, arg01,
6730 build_int_cst (TREE_TYPE (arg01), 1), 0);
6731 lo = prod;
6732
6733 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6734 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6735 TREE_INT_CST_HIGH (prod),
6736 TREE_INT_CST_LOW (tmp),
6737 TREE_INT_CST_HIGH (tmp),
6738 &lpart, &hpart, unsigned_p);
6739 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6740 -1, overflow | TREE_OVERFLOW (prod));
6741 }
6742 else if (tree_int_cst_sgn (arg01) >= 0)
6743 {
6744 tmp = int_const_binop (MINUS_EXPR, arg01,
6745 build_int_cst (TREE_TYPE (arg01), 1), 0);
6746 switch (tree_int_cst_sgn (arg1))
6747 {
6748 case -1:
6749 neg_overflow = true;
6750 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6751 hi = prod;
6752 break;
6753
6754 case 0:
6755 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6756 hi = tmp;
6757 break;
6758
6759 case 1:
6760 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6761 lo = prod;
6762 break;
6763
6764 default:
6765 gcc_unreachable ();
6766 }
6767 }
6768 else
6769 {
6770 /* A negative divisor reverses the relational operators. */
6771 code = swap_tree_comparison (code);
6772
6773 tmp = int_const_binop (PLUS_EXPR, arg01,
6774 build_int_cst (TREE_TYPE (arg01), 1), 0);
6775 switch (tree_int_cst_sgn (arg1))
6776 {
6777 case -1:
6778 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6779 lo = prod;
6780 break;
6781
6782 case 0:
6783 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6784 lo = tmp;
6785 break;
6786
6787 case 1:
6788 neg_overflow = true;
6789 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6790 hi = prod;
6791 break;
6792
6793 default:
6794 gcc_unreachable ();
6795 }
6796 }
6797
6798 switch (code)
6799 {
6800 case EQ_EXPR:
6801 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6802 return omit_one_operand (type, integer_zero_node, arg00);
6803 if (TREE_OVERFLOW (hi))
6804 return fold_build2 (GE_EXPR, type, arg00, lo);
6805 if (TREE_OVERFLOW (lo))
6806 return fold_build2 (LE_EXPR, type, arg00, hi);
6807 return build_range_check (type, arg00, 1, lo, hi);
6808
6809 case NE_EXPR:
6810 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6811 return omit_one_operand (type, integer_one_node, arg00);
6812 if (TREE_OVERFLOW (hi))
6813 return fold_build2 (LT_EXPR, type, arg00, lo);
6814 if (TREE_OVERFLOW (lo))
6815 return fold_build2 (GT_EXPR, type, arg00, hi);
6816 return build_range_check (type, arg00, 0, lo, hi);
6817
6818 case LT_EXPR:
6819 if (TREE_OVERFLOW (lo))
6820 {
6821 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6822 return omit_one_operand (type, tmp, arg00);
6823 }
6824 return fold_build2 (LT_EXPR, type, arg00, lo);
6825
6826 case LE_EXPR:
6827 if (TREE_OVERFLOW (hi))
6828 {
6829 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6830 return omit_one_operand (type, tmp, arg00);
6831 }
6832 return fold_build2 (LE_EXPR, type, arg00, hi);
6833
6834 case GT_EXPR:
6835 if (TREE_OVERFLOW (hi))
6836 {
6837 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6838 return omit_one_operand (type, tmp, arg00);
6839 }
6840 return fold_build2 (GT_EXPR, type, arg00, hi);
6841
6842 case GE_EXPR:
6843 if (TREE_OVERFLOW (lo))
6844 {
6845 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6846 return omit_one_operand (type, tmp, arg00);
6847 }
6848 return fold_build2 (GE_EXPR, type, arg00, lo);
6849
6850 default:
6851 break;
6852 }
6853
6854 return NULL_TREE;
6855 }
6856
6857
6858 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6859 equality/inequality test, then return a simplified form of the test
6860 using a sign testing. Otherwise return NULL. TYPE is the desired
6861 result type. */
6862
6863 static tree
6864 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6865 tree result_type)
6866 {
6867 /* If this is testing a single bit, we can optimize the test. */
6868 if ((code == NE_EXPR || code == EQ_EXPR)
6869 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6870 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6871 {
6872 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6873 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6874 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6875
6876 if (arg00 != NULL_TREE
6877 /* This is only a win if casting to a signed type is cheap,
6878 i.e. when arg00's type is not a partial mode. */
6879 && TYPE_PRECISION (TREE_TYPE (arg00))
6880 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6881 {
6882 tree stype = signed_type_for (TREE_TYPE (arg00));
6883 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6884 result_type, fold_convert (stype, arg00),
6885 build_int_cst (stype, 0));
6886 }
6887 }
6888
6889 return NULL_TREE;
6890 }
6891
6892 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6893 equality/inequality test, then return a simplified form of
6894 the test using shifts and logical operations. Otherwise return
6895 NULL. TYPE is the desired result type. */
6896
6897 tree
6898 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6899 tree result_type)
6900 {
6901 /* If this is testing a single bit, we can optimize the test. */
6902 if ((code == NE_EXPR || code == EQ_EXPR)
6903 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6904 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6905 {
6906 tree inner = TREE_OPERAND (arg0, 0);
6907 tree type = TREE_TYPE (arg0);
6908 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6909 enum machine_mode operand_mode = TYPE_MODE (type);
6910 int ops_unsigned;
6911 tree signed_type, unsigned_type, intermediate_type;
6912 tree tem, one;
6913
6914 /* First, see if we can fold the single bit test into a sign-bit
6915 test. */
6916 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6917 result_type);
6918 if (tem)
6919 return tem;
6920
6921 /* Otherwise we have (A & C) != 0 where C is a single bit,
6922 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6923 Similarly for (A & C) == 0. */
6924
6925 /* If INNER is a right shift of a constant and it plus BITNUM does
6926 not overflow, adjust BITNUM and INNER. */
6927 if (TREE_CODE (inner) == RSHIFT_EXPR
6928 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6929 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6930 && bitnum < TYPE_PRECISION (type)
6931 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6932 bitnum - TYPE_PRECISION (type)))
6933 {
6934 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6935 inner = TREE_OPERAND (inner, 0);
6936 }
6937
6938 /* If we are going to be able to omit the AND below, we must do our
6939 operations as unsigned. If we must use the AND, we have a choice.
6940 Normally unsigned is faster, but for some machines signed is. */
6941 #ifdef LOAD_EXTEND_OP
6942 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6943 && !flag_syntax_only) ? 0 : 1;
6944 #else
6945 ops_unsigned = 1;
6946 #endif
6947
6948 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6949 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6950 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6951 inner = fold_convert (intermediate_type, inner);
6952
6953 if (bitnum != 0)
6954 inner = build2 (RSHIFT_EXPR, intermediate_type,
6955 inner, size_int (bitnum));
6956
6957 one = build_int_cst (intermediate_type, 1);
6958
6959 if (code == EQ_EXPR)
6960 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6961
6962 /* Put the AND last so it can combine with more things. */
6963 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6964
6965 /* Make sure to return the proper type. */
6966 inner = fold_convert (result_type, inner);
6967
6968 return inner;
6969 }
6970 return NULL_TREE;
6971 }
6972
6973 /* Check whether we are allowed to reorder operands arg0 and arg1,
6974 such that the evaluation of arg1 occurs before arg0. */
6975
6976 static bool
6977 reorder_operands_p (tree arg0, tree arg1)
6978 {
6979 if (! flag_evaluation_order)
6980 return true;
6981 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6982 return true;
6983 return ! TREE_SIDE_EFFECTS (arg0)
6984 && ! TREE_SIDE_EFFECTS (arg1);
6985 }
6986
6987 /* Test whether it is preferable two swap two operands, ARG0 and
6988 ARG1, for example because ARG0 is an integer constant and ARG1
6989 isn't. If REORDER is true, only recommend swapping if we can
6990 evaluate the operands in reverse order. */
6991
6992 bool
6993 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6994 {
6995 STRIP_SIGN_NOPS (arg0);
6996 STRIP_SIGN_NOPS (arg1);
6997
6998 if (TREE_CODE (arg1) == INTEGER_CST)
6999 return 0;
7000 if (TREE_CODE (arg0) == INTEGER_CST)
7001 return 1;
7002
7003 if (TREE_CODE (arg1) == REAL_CST)
7004 return 0;
7005 if (TREE_CODE (arg0) == REAL_CST)
7006 return 1;
7007
7008 if (TREE_CODE (arg1) == FIXED_CST)
7009 return 0;
7010 if (TREE_CODE (arg0) == FIXED_CST)
7011 return 1;
7012
7013 if (TREE_CODE (arg1) == COMPLEX_CST)
7014 return 0;
7015 if (TREE_CODE (arg0) == COMPLEX_CST)
7016 return 1;
7017
7018 if (TREE_CONSTANT (arg1))
7019 return 0;
7020 if (TREE_CONSTANT (arg0))
7021 return 1;
7022
7023 if (optimize_size)
7024 return 0;
7025
7026 if (reorder && flag_evaluation_order
7027 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7028 return 0;
7029
7030 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7031 for commutative and comparison operators. Ensuring a canonical
7032 form allows the optimizers to find additional redundancies without
7033 having to explicitly check for both orderings. */
7034 if (TREE_CODE (arg0) == SSA_NAME
7035 && TREE_CODE (arg1) == SSA_NAME
7036 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7037 return 1;
7038
7039 /* Put SSA_NAMEs last. */
7040 if (TREE_CODE (arg1) == SSA_NAME)
7041 return 0;
7042 if (TREE_CODE (arg0) == SSA_NAME)
7043 return 1;
7044
7045 /* Put variables last. */
7046 if (DECL_P (arg1))
7047 return 0;
7048 if (DECL_P (arg0))
7049 return 1;
7050
7051 return 0;
7052 }
7053
7054 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7055 ARG0 is extended to a wider type. */
7056
7057 static tree
7058 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7059 {
7060 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7061 tree arg1_unw;
7062 tree shorter_type, outer_type;
7063 tree min, max;
7064 bool above, below;
7065
7066 if (arg0_unw == arg0)
7067 return NULL_TREE;
7068 shorter_type = TREE_TYPE (arg0_unw);
7069
7070 #ifdef HAVE_canonicalize_funcptr_for_compare
7071 /* Disable this optimization if we're casting a function pointer
7072 type on targets that require function pointer canonicalization. */
7073 if (HAVE_canonicalize_funcptr_for_compare
7074 && TREE_CODE (shorter_type) == POINTER_TYPE
7075 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7076 return NULL_TREE;
7077 #endif
7078
7079 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7080 return NULL_TREE;
7081
7082 arg1_unw = get_unwidened (arg1, shorter_type);
7083
7084 /* If possible, express the comparison in the shorter mode. */
7085 if ((code == EQ_EXPR || code == NE_EXPR
7086 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7087 && (TREE_TYPE (arg1_unw) == shorter_type
7088 || (TREE_CODE (arg1_unw) == INTEGER_CST
7089 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7090 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7091 && int_fits_type_p (arg1_unw, shorter_type))))
7092 return fold_build2 (code, type, arg0_unw,
7093 fold_convert (shorter_type, arg1_unw));
7094
7095 if (TREE_CODE (arg1_unw) != INTEGER_CST
7096 || TREE_CODE (shorter_type) != INTEGER_TYPE
7097 || !int_fits_type_p (arg1_unw, shorter_type))
7098 return NULL_TREE;
7099
7100 /* If we are comparing with the integer that does not fit into the range
7101 of the shorter type, the result is known. */
7102 outer_type = TREE_TYPE (arg1_unw);
7103 min = lower_bound_in_type (outer_type, shorter_type);
7104 max = upper_bound_in_type (outer_type, shorter_type);
7105
7106 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7107 max, arg1_unw));
7108 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7109 arg1_unw, min));
7110
7111 switch (code)
7112 {
7113 case EQ_EXPR:
7114 if (above || below)
7115 return omit_one_operand (type, integer_zero_node, arg0);
7116 break;
7117
7118 case NE_EXPR:
7119 if (above || below)
7120 return omit_one_operand (type, integer_one_node, arg0);
7121 break;
7122
7123 case LT_EXPR:
7124 case LE_EXPR:
7125 if (above)
7126 return omit_one_operand (type, integer_one_node, arg0);
7127 else if (below)
7128 return omit_one_operand (type, integer_zero_node, arg0);
7129
7130 case GT_EXPR:
7131 case GE_EXPR:
7132 if (above)
7133 return omit_one_operand (type, integer_zero_node, arg0);
7134 else if (below)
7135 return omit_one_operand (type, integer_one_node, arg0);
7136
7137 default:
7138 break;
7139 }
7140
7141 return NULL_TREE;
7142 }
7143
7144 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7145 ARG0 just the signedness is changed. */
7146
7147 static tree
7148 fold_sign_changed_comparison (enum tree_code code, tree type,
7149 tree arg0, tree arg1)
7150 {
7151 tree arg0_inner;
7152 tree inner_type, outer_type;
7153
7154 if (TREE_CODE (arg0) != NOP_EXPR
7155 && TREE_CODE (arg0) != CONVERT_EXPR)
7156 return NULL_TREE;
7157
7158 outer_type = TREE_TYPE (arg0);
7159 arg0_inner = TREE_OPERAND (arg0, 0);
7160 inner_type = TREE_TYPE (arg0_inner);
7161
7162 #ifdef HAVE_canonicalize_funcptr_for_compare
7163 /* Disable this optimization if we're casting a function pointer
7164 type on targets that require function pointer canonicalization. */
7165 if (HAVE_canonicalize_funcptr_for_compare
7166 && TREE_CODE (inner_type) == POINTER_TYPE
7167 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7168 return NULL_TREE;
7169 #endif
7170
7171 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7172 return NULL_TREE;
7173
7174 if (TREE_CODE (arg1) != INTEGER_CST
7175 && !((TREE_CODE (arg1) == NOP_EXPR
7176 || TREE_CODE (arg1) == CONVERT_EXPR)
7177 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7178 return NULL_TREE;
7179
7180 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7181 && code != NE_EXPR
7182 && code != EQ_EXPR)
7183 return NULL_TREE;
7184
7185 if (TREE_CODE (arg1) == INTEGER_CST)
7186 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7187 TREE_INT_CST_HIGH (arg1), 0,
7188 TREE_OVERFLOW (arg1));
7189 else
7190 arg1 = fold_convert (inner_type, arg1);
7191
7192 return fold_build2 (code, type, arg0_inner, arg1);
7193 }
7194
7195 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7196 step of the array. Reconstructs s and delta in the case of s * delta
7197 being an integer constant (and thus already folded).
7198 ADDR is the address. MULT is the multiplicative expression.
7199 If the function succeeds, the new address expression is returned. Otherwise
7200 NULL_TREE is returned. */
7201
7202 static tree
7203 try_move_mult_to_index (tree addr, tree op1)
7204 {
7205 tree s, delta, step;
7206 tree ref = TREE_OPERAND (addr, 0), pref;
7207 tree ret, pos;
7208 tree itype;
7209 bool mdim = false;
7210
7211 /* Strip the nops that might be added when converting op1 to sizetype. */
7212 STRIP_NOPS (op1);
7213
7214 /* Canonicalize op1 into a possibly non-constant delta
7215 and an INTEGER_CST s. */
7216 if (TREE_CODE (op1) == MULT_EXPR)
7217 {
7218 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7219
7220 STRIP_NOPS (arg0);
7221 STRIP_NOPS (arg1);
7222
7223 if (TREE_CODE (arg0) == INTEGER_CST)
7224 {
7225 s = arg0;
7226 delta = arg1;
7227 }
7228 else if (TREE_CODE (arg1) == INTEGER_CST)
7229 {
7230 s = arg1;
7231 delta = arg0;
7232 }
7233 else
7234 return NULL_TREE;
7235 }
7236 else if (TREE_CODE (op1) == INTEGER_CST)
7237 {
7238 delta = op1;
7239 s = NULL_TREE;
7240 }
7241 else
7242 {
7243 /* Simulate we are delta * 1. */
7244 delta = op1;
7245 s = integer_one_node;
7246 }
7247
7248 for (;; ref = TREE_OPERAND (ref, 0))
7249 {
7250 if (TREE_CODE (ref) == ARRAY_REF)
7251 {
7252 /* Remember if this was a multi-dimensional array. */
7253 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7254 mdim = true;
7255
7256 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7257 if (! itype)
7258 continue;
7259
7260 step = array_ref_element_size (ref);
7261 if (TREE_CODE (step) != INTEGER_CST)
7262 continue;
7263
7264 if (s)
7265 {
7266 if (! tree_int_cst_equal (step, s))
7267 continue;
7268 }
7269 else
7270 {
7271 /* Try if delta is a multiple of step. */
7272 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
7273 if (! tmp)
7274 continue;
7275 delta = tmp;
7276 }
7277
7278 /* Only fold here if we can verify we do not overflow one
7279 dimension of a multi-dimensional array. */
7280 if (mdim)
7281 {
7282 tree tmp;
7283
7284 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7285 || !INTEGRAL_TYPE_P (itype)
7286 || !TYPE_MAX_VALUE (itype)
7287 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7288 continue;
7289
7290 tmp = fold_binary (PLUS_EXPR, itype,
7291 fold_convert (itype,
7292 TREE_OPERAND (ref, 1)),
7293 fold_convert (itype, delta));
7294 if (!tmp
7295 || TREE_CODE (tmp) != INTEGER_CST
7296 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7297 continue;
7298 }
7299
7300 break;
7301 }
7302 else
7303 mdim = false;
7304
7305 if (!handled_component_p (ref))
7306 return NULL_TREE;
7307 }
7308
7309 /* We found the suitable array reference. So copy everything up to it,
7310 and replace the index. */
7311
7312 pref = TREE_OPERAND (addr, 0);
7313 ret = copy_node (pref);
7314 pos = ret;
7315
7316 while (pref != ref)
7317 {
7318 pref = TREE_OPERAND (pref, 0);
7319 TREE_OPERAND (pos, 0) = copy_node (pref);
7320 pos = TREE_OPERAND (pos, 0);
7321 }
7322
7323 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7324 fold_convert (itype,
7325 TREE_OPERAND (pos, 1)),
7326 fold_convert (itype, delta));
7327
7328 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7329 }
7330
7331
7332 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7333 means A >= Y && A != MAX, but in this case we know that
7334 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7335
7336 static tree
7337 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7338 {
7339 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7340
7341 if (TREE_CODE (bound) == LT_EXPR)
7342 a = TREE_OPERAND (bound, 0);
7343 else if (TREE_CODE (bound) == GT_EXPR)
7344 a = TREE_OPERAND (bound, 1);
7345 else
7346 return NULL_TREE;
7347
7348 typea = TREE_TYPE (a);
7349 if (!INTEGRAL_TYPE_P (typea)
7350 && !POINTER_TYPE_P (typea))
7351 return NULL_TREE;
7352
7353 if (TREE_CODE (ineq) == LT_EXPR)
7354 {
7355 a1 = TREE_OPERAND (ineq, 1);
7356 y = TREE_OPERAND (ineq, 0);
7357 }
7358 else if (TREE_CODE (ineq) == GT_EXPR)
7359 {
7360 a1 = TREE_OPERAND (ineq, 0);
7361 y = TREE_OPERAND (ineq, 1);
7362 }
7363 else
7364 return NULL_TREE;
7365
7366 if (TREE_TYPE (a1) != typea)
7367 return NULL_TREE;
7368
7369 if (POINTER_TYPE_P (typea))
7370 {
7371 /* Convert the pointer types into integer before taking the difference. */
7372 tree ta = fold_convert (ssizetype, a);
7373 tree ta1 = fold_convert (ssizetype, a1);
7374 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7375 }
7376 else
7377 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7378
7379 if (!diff || !integer_onep (diff))
7380 return NULL_TREE;
7381
7382 return fold_build2 (GE_EXPR, type, a, y);
7383 }
7384
7385 /* Fold a sum or difference of at least one multiplication.
7386 Returns the folded tree or NULL if no simplification could be made. */
7387
7388 static tree
7389 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7390 {
7391 tree arg00, arg01, arg10, arg11;
7392 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7393
7394 /* (A * C) +- (B * C) -> (A+-B) * C.
7395 (A * C) +- A -> A * (C+-1).
7396 We are most concerned about the case where C is a constant,
7397 but other combinations show up during loop reduction. Since
7398 it is not difficult, try all four possibilities. */
7399
7400 if (TREE_CODE (arg0) == MULT_EXPR)
7401 {
7402 arg00 = TREE_OPERAND (arg0, 0);
7403 arg01 = TREE_OPERAND (arg0, 1);
7404 }
7405 else if (TREE_CODE (arg0) == INTEGER_CST)
7406 {
7407 arg00 = build_one_cst (type);
7408 arg01 = arg0;
7409 }
7410 else
7411 {
7412 /* We cannot generate constant 1 for fract. */
7413 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7414 return NULL_TREE;
7415 arg00 = arg0;
7416 arg01 = build_one_cst (type);
7417 }
7418 if (TREE_CODE (arg1) == MULT_EXPR)
7419 {
7420 arg10 = TREE_OPERAND (arg1, 0);
7421 arg11 = TREE_OPERAND (arg1, 1);
7422 }
7423 else if (TREE_CODE (arg1) == INTEGER_CST)
7424 {
7425 arg10 = build_one_cst (type);
7426 arg11 = arg1;
7427 }
7428 else
7429 {
7430 /* We cannot generate constant 1 for fract. */
7431 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7432 return NULL_TREE;
7433 arg10 = arg1;
7434 arg11 = build_one_cst (type);
7435 }
7436 same = NULL_TREE;
7437
7438 if (operand_equal_p (arg01, arg11, 0))
7439 same = arg01, alt0 = arg00, alt1 = arg10;
7440 else if (operand_equal_p (arg00, arg10, 0))
7441 same = arg00, alt0 = arg01, alt1 = arg11;
7442 else if (operand_equal_p (arg00, arg11, 0))
7443 same = arg00, alt0 = arg01, alt1 = arg10;
7444 else if (operand_equal_p (arg01, arg10, 0))
7445 same = arg01, alt0 = arg00, alt1 = arg11;
7446
7447 /* No identical multiplicands; see if we can find a common
7448 power-of-two factor in non-power-of-two multiplies. This
7449 can help in multi-dimensional array access. */
7450 else if (host_integerp (arg01, 0)
7451 && host_integerp (arg11, 0))
7452 {
7453 HOST_WIDE_INT int01, int11, tmp;
7454 bool swap = false;
7455 tree maybe_same;
7456 int01 = TREE_INT_CST_LOW (arg01);
7457 int11 = TREE_INT_CST_LOW (arg11);
7458
7459 /* Move min of absolute values to int11. */
7460 if ((int01 >= 0 ? int01 : -int01)
7461 < (int11 >= 0 ? int11 : -int11))
7462 {
7463 tmp = int01, int01 = int11, int11 = tmp;
7464 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7465 maybe_same = arg01;
7466 swap = true;
7467 }
7468 else
7469 maybe_same = arg11;
7470
7471 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7472 {
7473 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7474 build_int_cst (TREE_TYPE (arg00),
7475 int01 / int11));
7476 alt1 = arg10;
7477 same = maybe_same;
7478 if (swap)
7479 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7480 }
7481 }
7482
7483 if (same)
7484 return fold_build2 (MULT_EXPR, type,
7485 fold_build2 (code, type,
7486 fold_convert (type, alt0),
7487 fold_convert (type, alt1)),
7488 fold_convert (type, same));
7489
7490 return NULL_TREE;
7491 }
7492
7493 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7494 specified by EXPR into the buffer PTR of length LEN bytes.
7495 Return the number of bytes placed in the buffer, or zero
7496 upon failure. */
7497
7498 static int
7499 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7500 {
7501 tree type = TREE_TYPE (expr);
7502 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7503 int byte, offset, word, words;
7504 unsigned char value;
7505
7506 if (total_bytes > len)
7507 return 0;
7508 words = total_bytes / UNITS_PER_WORD;
7509
7510 for (byte = 0; byte < total_bytes; byte++)
7511 {
7512 int bitpos = byte * BITS_PER_UNIT;
7513 if (bitpos < HOST_BITS_PER_WIDE_INT)
7514 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7515 else
7516 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7517 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7518
7519 if (total_bytes > UNITS_PER_WORD)
7520 {
7521 word = byte / UNITS_PER_WORD;
7522 if (WORDS_BIG_ENDIAN)
7523 word = (words - 1) - word;
7524 offset = word * UNITS_PER_WORD;
7525 if (BYTES_BIG_ENDIAN)
7526 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7527 else
7528 offset += byte % UNITS_PER_WORD;
7529 }
7530 else
7531 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7532 ptr[offset] = value;
7533 }
7534 return total_bytes;
7535 }
7536
7537
7538 /* Subroutine of native_encode_expr. Encode the REAL_CST
7539 specified by EXPR into the buffer PTR of length LEN bytes.
7540 Return the number of bytes placed in the buffer, or zero
7541 upon failure. */
7542
7543 static int
7544 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7545 {
7546 tree type = TREE_TYPE (expr);
7547 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7548 int byte, offset, word, words, bitpos;
7549 unsigned char value;
7550
7551 /* There are always 32 bits in each long, no matter the size of
7552 the hosts long. We handle floating point representations with
7553 up to 192 bits. */
7554 long tmp[6];
7555
7556 if (total_bytes > len)
7557 return 0;
7558 words = 32 / UNITS_PER_WORD;
7559
7560 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7561
7562 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7563 bitpos += BITS_PER_UNIT)
7564 {
7565 byte = (bitpos / BITS_PER_UNIT) & 3;
7566 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7567
7568 if (UNITS_PER_WORD < 4)
7569 {
7570 word = byte / UNITS_PER_WORD;
7571 if (WORDS_BIG_ENDIAN)
7572 word = (words - 1) - word;
7573 offset = word * UNITS_PER_WORD;
7574 if (BYTES_BIG_ENDIAN)
7575 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7576 else
7577 offset += byte % UNITS_PER_WORD;
7578 }
7579 else
7580 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7581 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7582 }
7583 return total_bytes;
7584 }
7585
7586 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7587 specified by EXPR into the buffer PTR of length LEN bytes.
7588 Return the number of bytes placed in the buffer, or zero
7589 upon failure. */
7590
7591 static int
7592 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7593 {
7594 int rsize, isize;
7595 tree part;
7596
7597 part = TREE_REALPART (expr);
7598 rsize = native_encode_expr (part, ptr, len);
7599 if (rsize == 0)
7600 return 0;
7601 part = TREE_IMAGPART (expr);
7602 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7603 if (isize != rsize)
7604 return 0;
7605 return rsize + isize;
7606 }
7607
7608
7609 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7610 specified by EXPR into the buffer PTR of length LEN bytes.
7611 Return the number of bytes placed in the buffer, or zero
7612 upon failure. */
7613
7614 static int
7615 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7616 {
7617 int i, size, offset, count;
7618 tree itype, elem, elements;
7619
7620 offset = 0;
7621 elements = TREE_VECTOR_CST_ELTS (expr);
7622 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7623 itype = TREE_TYPE (TREE_TYPE (expr));
7624 size = GET_MODE_SIZE (TYPE_MODE (itype));
7625 for (i = 0; i < count; i++)
7626 {
7627 if (elements)
7628 {
7629 elem = TREE_VALUE (elements);
7630 elements = TREE_CHAIN (elements);
7631 }
7632 else
7633 elem = NULL_TREE;
7634
7635 if (elem)
7636 {
7637 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7638 return 0;
7639 }
7640 else
7641 {
7642 if (offset + size > len)
7643 return 0;
7644 memset (ptr+offset, 0, size);
7645 }
7646 offset += size;
7647 }
7648 return offset;
7649 }
7650
7651
7652 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7653 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7654 buffer PTR of length LEN bytes. Return the number of bytes
7655 placed in the buffer, or zero upon failure. */
7656
7657 int
7658 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7659 {
7660 switch (TREE_CODE (expr))
7661 {
7662 case INTEGER_CST:
7663 return native_encode_int (expr, ptr, len);
7664
7665 case REAL_CST:
7666 return native_encode_real (expr, ptr, len);
7667
7668 case COMPLEX_CST:
7669 return native_encode_complex (expr, ptr, len);
7670
7671 case VECTOR_CST:
7672 return native_encode_vector (expr, ptr, len);
7673
7674 default:
7675 return 0;
7676 }
7677 }
7678
7679
7680 /* Subroutine of native_interpret_expr. Interpret the contents of
7681 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7682 If the buffer cannot be interpreted, return NULL_TREE. */
7683
7684 static tree
7685 native_interpret_int (tree type, const unsigned char *ptr, int len)
7686 {
7687 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7688 int byte, offset, word, words;
7689 unsigned char value;
7690 unsigned int HOST_WIDE_INT lo = 0;
7691 HOST_WIDE_INT hi = 0;
7692
7693 if (total_bytes > len)
7694 return NULL_TREE;
7695 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7696 return NULL_TREE;
7697 words = total_bytes / UNITS_PER_WORD;
7698
7699 for (byte = 0; byte < total_bytes; byte++)
7700 {
7701 int bitpos = byte * BITS_PER_UNIT;
7702 if (total_bytes > UNITS_PER_WORD)
7703 {
7704 word = byte / UNITS_PER_WORD;
7705 if (WORDS_BIG_ENDIAN)
7706 word = (words - 1) - word;
7707 offset = word * UNITS_PER_WORD;
7708 if (BYTES_BIG_ENDIAN)
7709 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7710 else
7711 offset += byte % UNITS_PER_WORD;
7712 }
7713 else
7714 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7715 value = ptr[offset];
7716
7717 if (bitpos < HOST_BITS_PER_WIDE_INT)
7718 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7719 else
7720 hi |= (unsigned HOST_WIDE_INT) value
7721 << (bitpos - HOST_BITS_PER_WIDE_INT);
7722 }
7723
7724 return build_int_cst_wide_type (type, lo, hi);
7725 }
7726
7727
7728 /* Subroutine of native_interpret_expr. Interpret the contents of
7729 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7730 If the buffer cannot be interpreted, return NULL_TREE. */
7731
7732 static tree
7733 native_interpret_real (tree type, const unsigned char *ptr, int len)
7734 {
7735 enum machine_mode mode = TYPE_MODE (type);
7736 int total_bytes = GET_MODE_SIZE (mode);
7737 int byte, offset, word, words, bitpos;
7738 unsigned char value;
7739 /* There are always 32 bits in each long, no matter the size of
7740 the hosts long. We handle floating point representations with
7741 up to 192 bits. */
7742 REAL_VALUE_TYPE r;
7743 long tmp[6];
7744
7745 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7746 if (total_bytes > len || total_bytes > 24)
7747 return NULL_TREE;
7748 words = 32 / UNITS_PER_WORD;
7749
7750 memset (tmp, 0, sizeof (tmp));
7751 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7752 bitpos += BITS_PER_UNIT)
7753 {
7754 byte = (bitpos / BITS_PER_UNIT) & 3;
7755 if (UNITS_PER_WORD < 4)
7756 {
7757 word = byte / UNITS_PER_WORD;
7758 if (WORDS_BIG_ENDIAN)
7759 word = (words - 1) - word;
7760 offset = word * UNITS_PER_WORD;
7761 if (BYTES_BIG_ENDIAN)
7762 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7763 else
7764 offset += byte % UNITS_PER_WORD;
7765 }
7766 else
7767 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7768 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7769
7770 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7771 }
7772
7773 real_from_target (&r, tmp, mode);
7774 return build_real (type, r);
7775 }
7776
7777
7778 /* Subroutine of native_interpret_expr. Interpret the contents of
7779 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7780 If the buffer cannot be interpreted, return NULL_TREE. */
7781
7782 static tree
7783 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7784 {
7785 tree etype, rpart, ipart;
7786 int size;
7787
7788 etype = TREE_TYPE (type);
7789 size = GET_MODE_SIZE (TYPE_MODE (etype));
7790 if (size * 2 > len)
7791 return NULL_TREE;
7792 rpart = native_interpret_expr (etype, ptr, size);
7793 if (!rpart)
7794 return NULL_TREE;
7795 ipart = native_interpret_expr (etype, ptr+size, size);
7796 if (!ipart)
7797 return NULL_TREE;
7798 return build_complex (type, rpart, ipart);
7799 }
7800
7801
7802 /* Subroutine of native_interpret_expr. Interpret the contents of
7803 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7804 If the buffer cannot be interpreted, return NULL_TREE. */
7805
7806 static tree
7807 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7808 {
7809 tree etype, elem, elements;
7810 int i, size, count;
7811
7812 etype = TREE_TYPE (type);
7813 size = GET_MODE_SIZE (TYPE_MODE (etype));
7814 count = TYPE_VECTOR_SUBPARTS (type);
7815 if (size * count > len)
7816 return NULL_TREE;
7817
7818 elements = NULL_TREE;
7819 for (i = count - 1; i >= 0; i--)
7820 {
7821 elem = native_interpret_expr (etype, ptr+(i*size), size);
7822 if (!elem)
7823 return NULL_TREE;
7824 elements = tree_cons (NULL_TREE, elem, elements);
7825 }
7826 return build_vector (type, elements);
7827 }
7828
7829
7830 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7831 the buffer PTR of length LEN as a constant of type TYPE. For
7832 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7833 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7834 return NULL_TREE. */
7835
7836 tree
7837 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7838 {
7839 switch (TREE_CODE (type))
7840 {
7841 case INTEGER_TYPE:
7842 case ENUMERAL_TYPE:
7843 case BOOLEAN_TYPE:
7844 return native_interpret_int (type, ptr, len);
7845
7846 case REAL_TYPE:
7847 return native_interpret_real (type, ptr, len);
7848
7849 case COMPLEX_TYPE:
7850 return native_interpret_complex (type, ptr, len);
7851
7852 case VECTOR_TYPE:
7853 return native_interpret_vector (type, ptr, len);
7854
7855 default:
7856 return NULL_TREE;
7857 }
7858 }
7859
7860
7861 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7862 TYPE at compile-time. If we're unable to perform the conversion
7863 return NULL_TREE. */
7864
7865 static tree
7866 fold_view_convert_expr (tree type, tree expr)
7867 {
7868 /* We support up to 512-bit values (for V8DFmode). */
7869 unsigned char buffer[64];
7870 int len;
7871
7872 /* Check that the host and target are sane. */
7873 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7874 return NULL_TREE;
7875
7876 len = native_encode_expr (expr, buffer, sizeof (buffer));
7877 if (len == 0)
7878 return NULL_TREE;
7879
7880 return native_interpret_expr (type, buffer, len);
7881 }
7882
7883 /* Build an expression for the address of T. Folds away INDIRECT_REF
7884 to avoid confusing the gimplify process. When IN_FOLD is true
7885 avoid modifications of T. */
7886
7887 static tree
7888 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7889 {
7890 /* The size of the object is not relevant when talking about its address. */
7891 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7892 t = TREE_OPERAND (t, 0);
7893
7894 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7895 if (TREE_CODE (t) == INDIRECT_REF
7896 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7897 {
7898 t = TREE_OPERAND (t, 0);
7899
7900 if (TREE_TYPE (t) != ptrtype)
7901 t = build1 (NOP_EXPR, ptrtype, t);
7902 }
7903 else if (!in_fold)
7904 {
7905 tree base = t;
7906
7907 while (handled_component_p (base))
7908 base = TREE_OPERAND (base, 0);
7909
7910 if (DECL_P (base))
7911 TREE_ADDRESSABLE (base) = 1;
7912
7913 t = build1 (ADDR_EXPR, ptrtype, t);
7914 }
7915 else
7916 t = build1 (ADDR_EXPR, ptrtype, t);
7917
7918 return t;
7919 }
7920
7921 /* Build an expression for the address of T with type PTRTYPE. This
7922 function modifies the input parameter 'T' by sometimes setting the
7923 TREE_ADDRESSABLE flag. */
7924
7925 tree
7926 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7927 {
7928 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7929 }
7930
7931 /* Build an expression for the address of T. This function modifies
7932 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7933 flag. When called from fold functions, use fold_addr_expr instead. */
7934
7935 tree
7936 build_fold_addr_expr (tree t)
7937 {
7938 return build_fold_addr_expr_with_type_1 (t,
7939 build_pointer_type (TREE_TYPE (t)),
7940 false);
7941 }
7942
7943 /* Same as build_fold_addr_expr, builds an expression for the address
7944 of T, but avoids touching the input node 't'. Fold functions
7945 should use this version. */
7946
7947 static tree
7948 fold_addr_expr (tree t)
7949 {
7950 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7951
7952 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7953 }
7954
7955 /* Fold a unary expression of code CODE and type TYPE with operand
7956 OP0. Return the folded expression if folding is successful.
7957 Otherwise, return NULL_TREE. */
7958
7959 tree
7960 fold_unary (enum tree_code code, tree type, tree op0)
7961 {
7962 tree tem;
7963 tree arg0;
7964 enum tree_code_class kind = TREE_CODE_CLASS (code);
7965
7966 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7967 && TREE_CODE_LENGTH (code) == 1);
7968
7969 arg0 = op0;
7970 if (arg0)
7971 {
7972 if (code == NOP_EXPR || code == CONVERT_EXPR
7973 || code == FLOAT_EXPR || code == ABS_EXPR)
7974 {
7975 /* Don't use STRIP_NOPS, because signedness of argument type
7976 matters. */
7977 STRIP_SIGN_NOPS (arg0);
7978 }
7979 else
7980 {
7981 /* Strip any conversions that don't change the mode. This
7982 is safe for every expression, except for a comparison
7983 expression because its signedness is derived from its
7984 operands.
7985
7986 Note that this is done as an internal manipulation within
7987 the constant folder, in order to find the simplest
7988 representation of the arguments so that their form can be
7989 studied. In any cases, the appropriate type conversions
7990 should be put back in the tree that will get out of the
7991 constant folder. */
7992 STRIP_NOPS (arg0);
7993 }
7994 }
7995
7996 if (TREE_CODE_CLASS (code) == tcc_unary)
7997 {
7998 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7999 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8000 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
8001 else if (TREE_CODE (arg0) == COND_EXPR)
8002 {
8003 tree arg01 = TREE_OPERAND (arg0, 1);
8004 tree arg02 = TREE_OPERAND (arg0, 2);
8005 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8006 arg01 = fold_build1 (code, type, arg01);
8007 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8008 arg02 = fold_build1 (code, type, arg02);
8009 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
8010 arg01, arg02);
8011
8012 /* If this was a conversion, and all we did was to move into
8013 inside the COND_EXPR, bring it back out. But leave it if
8014 it is a conversion from integer to integer and the
8015 result precision is no wider than a word since such a
8016 conversion is cheap and may be optimized away by combine,
8017 while it couldn't if it were outside the COND_EXPR. Then return
8018 so we don't get into an infinite recursion loop taking the
8019 conversion out and then back in. */
8020
8021 if ((code == NOP_EXPR || code == CONVERT_EXPR
8022 || code == NON_LVALUE_EXPR)
8023 && TREE_CODE (tem) == COND_EXPR
8024 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8025 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8026 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8027 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8028 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8029 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8030 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8031 && (INTEGRAL_TYPE_P
8032 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8033 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8034 || flag_syntax_only))
8035 tem = build1 (code, type,
8036 build3 (COND_EXPR,
8037 TREE_TYPE (TREE_OPERAND
8038 (TREE_OPERAND (tem, 1), 0)),
8039 TREE_OPERAND (tem, 0),
8040 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8041 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8042 return tem;
8043 }
8044 else if (COMPARISON_CLASS_P (arg0))
8045 {
8046 if (TREE_CODE (type) == BOOLEAN_TYPE)
8047 {
8048 arg0 = copy_node (arg0);
8049 TREE_TYPE (arg0) = type;
8050 return arg0;
8051 }
8052 else if (TREE_CODE (type) != INTEGER_TYPE)
8053 return fold_build3 (COND_EXPR, type, arg0,
8054 fold_build1 (code, type,
8055 integer_one_node),
8056 fold_build1 (code, type,
8057 integer_zero_node));
8058 }
8059 }
8060
8061 switch (code)
8062 {
8063 case NOP_EXPR:
8064 case FLOAT_EXPR:
8065 case CONVERT_EXPR:
8066 case FIX_TRUNC_EXPR:
8067 if (TREE_TYPE (op0) == type)
8068 return op0;
8069
8070 /* If we have (type) (a CMP b) and type is an integral type, return
8071 new expression involving the new type. */
8072 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8073 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8074 TREE_OPERAND (op0, 1));
8075
8076 /* Handle cases of two conversions in a row. */
8077 if (TREE_CODE (op0) == NOP_EXPR
8078 || TREE_CODE (op0) == CONVERT_EXPR)
8079 {
8080 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8081 tree inter_type = TREE_TYPE (op0);
8082 int inside_int = INTEGRAL_TYPE_P (inside_type);
8083 int inside_ptr = POINTER_TYPE_P (inside_type);
8084 int inside_float = FLOAT_TYPE_P (inside_type);
8085 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8086 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8087 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8088 int inter_int = INTEGRAL_TYPE_P (inter_type);
8089 int inter_ptr = POINTER_TYPE_P (inter_type);
8090 int inter_float = FLOAT_TYPE_P (inter_type);
8091 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8092 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8093 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8094 int final_int = INTEGRAL_TYPE_P (type);
8095 int final_ptr = POINTER_TYPE_P (type);
8096 int final_float = FLOAT_TYPE_P (type);
8097 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8098 unsigned int final_prec = TYPE_PRECISION (type);
8099 int final_unsignedp = TYPE_UNSIGNED (type);
8100
8101 /* In addition to the cases of two conversions in a row
8102 handled below, if we are converting something to its own
8103 type via an object of identical or wider precision, neither
8104 conversion is needed. */
8105 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8106 && (((inter_int || inter_ptr) && final_int)
8107 || (inter_float && final_float))
8108 && inter_prec >= final_prec)
8109 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8110
8111 /* Likewise, if the intermediate and final types are either both
8112 float or both integer, we don't need the middle conversion if
8113 it is wider than the final type and doesn't change the signedness
8114 (for integers). Avoid this if the final type is a pointer
8115 since then we sometimes need the inner conversion. Likewise if
8116 the outer has a precision not equal to the size of its mode. */
8117 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
8118 || (inter_float && inside_float)
8119 || (inter_vec && inside_vec))
8120 && inter_prec >= inside_prec
8121 && (inter_float || inter_vec
8122 || inter_unsignedp == inside_unsignedp)
8123 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8124 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8125 && ! final_ptr
8126 && (! final_vec || inter_prec == inside_prec))
8127 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8128
8129 /* If we have a sign-extension of a zero-extended value, we can
8130 replace that by a single zero-extension. */
8131 if (inside_int && inter_int && final_int
8132 && inside_prec < inter_prec && inter_prec < final_prec
8133 && inside_unsignedp && !inter_unsignedp)
8134 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8135
8136 /* Two conversions in a row are not needed unless:
8137 - some conversion is floating-point (overstrict for now), or
8138 - some conversion is a vector (overstrict for now), or
8139 - the intermediate type is narrower than both initial and
8140 final, or
8141 - the intermediate type and innermost type differ in signedness,
8142 and the outermost type is wider than the intermediate, or
8143 - the initial type is a pointer type and the precisions of the
8144 intermediate and final types differ, or
8145 - the final type is a pointer type and the precisions of the
8146 initial and intermediate types differ.
8147 - the final type is a pointer type and the initial type not
8148 - the initial type is a pointer to an array and the final type
8149 not. */
8150 if (! inside_float && ! inter_float && ! final_float
8151 && ! inside_vec && ! inter_vec && ! final_vec
8152 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8153 && ! (inside_int && inter_int
8154 && inter_unsignedp != inside_unsignedp
8155 && inter_prec < final_prec)
8156 && ((inter_unsignedp && inter_prec > inside_prec)
8157 == (final_unsignedp && final_prec > inter_prec))
8158 && ! (inside_ptr && inter_prec != final_prec)
8159 && ! (final_ptr && inside_prec != inter_prec)
8160 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8161 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8162 && final_ptr == inside_ptr
8163 && ! (inside_ptr
8164 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
8165 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
8166 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8167 }
8168
8169 /* Handle (T *)&A.B.C for A being of type T and B and C
8170 living at offset zero. This occurs frequently in
8171 C++ upcasting and then accessing the base. */
8172 if (TREE_CODE (op0) == ADDR_EXPR
8173 && POINTER_TYPE_P (type)
8174 && handled_component_p (TREE_OPERAND (op0, 0)))
8175 {
8176 HOST_WIDE_INT bitsize, bitpos;
8177 tree offset;
8178 enum machine_mode mode;
8179 int unsignedp, volatilep;
8180 tree base = TREE_OPERAND (op0, 0);
8181 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8182 &mode, &unsignedp, &volatilep, false);
8183 /* If the reference was to a (constant) zero offset, we can use
8184 the address of the base if it has the same base type
8185 as the result type. */
8186 if (! offset && bitpos == 0
8187 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8188 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8189 return fold_convert (type, fold_addr_expr (base));
8190 }
8191
8192 if ((TREE_CODE (op0) == MODIFY_EXPR
8193 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
8194 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
8195 /* Detect assigning a bitfield. */
8196 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
8197 && DECL_BIT_FIELD
8198 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
8199 {
8200 /* Don't leave an assignment inside a conversion
8201 unless assigning a bitfield. */
8202 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
8203 /* First do the assignment, then return converted constant. */
8204 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8205 TREE_NO_WARNING (tem) = 1;
8206 TREE_USED (tem) = 1;
8207 return tem;
8208 }
8209
8210 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8211 constants (if x has signed type, the sign bit cannot be set
8212 in c). This folds extension into the BIT_AND_EXPR. */
8213 if (INTEGRAL_TYPE_P (type)
8214 && TREE_CODE (type) != BOOLEAN_TYPE
8215 && TREE_CODE (op0) == BIT_AND_EXPR
8216 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8217 {
8218 tree and = op0;
8219 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8220 int change = 0;
8221
8222 if (TYPE_UNSIGNED (TREE_TYPE (and))
8223 || (TYPE_PRECISION (type)
8224 <= TYPE_PRECISION (TREE_TYPE (and))))
8225 change = 1;
8226 else if (TYPE_PRECISION (TREE_TYPE (and1))
8227 <= HOST_BITS_PER_WIDE_INT
8228 && host_integerp (and1, 1))
8229 {
8230 unsigned HOST_WIDE_INT cst;
8231
8232 cst = tree_low_cst (and1, 1);
8233 cst &= (HOST_WIDE_INT) -1
8234 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8235 change = (cst == 0);
8236 #ifdef LOAD_EXTEND_OP
8237 if (change
8238 && !flag_syntax_only
8239 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8240 == ZERO_EXTEND))
8241 {
8242 tree uns = unsigned_type_for (TREE_TYPE (and0));
8243 and0 = fold_convert (uns, and0);
8244 and1 = fold_convert (uns, and1);
8245 }
8246 #endif
8247 }
8248 if (change)
8249 {
8250 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8251 TREE_INT_CST_HIGH (and1), 0,
8252 TREE_OVERFLOW (and1));
8253 return fold_build2 (BIT_AND_EXPR, type,
8254 fold_convert (type, and0), tem);
8255 }
8256 }
8257
8258 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8259 when one of the new casts will fold away. Conservatively we assume
8260 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8261 if (POINTER_TYPE_P (type)
8262 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8263 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8264 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8265 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8266 {
8267 tree arg00 = TREE_OPERAND (arg0, 0);
8268 tree arg01 = TREE_OPERAND (arg0, 1);
8269
8270 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8271 fold_convert (sizetype, arg01));
8272 }
8273
8274 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8275 of the same precision, and X is an integer type not narrower than
8276 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8277 if (INTEGRAL_TYPE_P (type)
8278 && TREE_CODE (op0) == BIT_NOT_EXPR
8279 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8280 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
8281 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
8282 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8283 {
8284 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8285 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8286 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8287 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8288 }
8289
8290 tem = fold_convert_const (code, type, op0);
8291 return tem ? tem : NULL_TREE;
8292
8293 case FIXED_CONVERT_EXPR:
8294 tem = fold_convert_const (code, type, arg0);
8295 return tem ? tem : NULL_TREE;
8296
8297 case VIEW_CONVERT_EXPR:
8298 if (TREE_TYPE (op0) == type)
8299 return op0;
8300 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8301 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8302 return fold_view_convert_expr (type, op0);
8303
8304 case NEGATE_EXPR:
8305 tem = fold_negate_expr (arg0);
8306 if (tem)
8307 return fold_convert (type, tem);
8308 return NULL_TREE;
8309
8310 case ABS_EXPR:
8311 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8312 return fold_abs_const (arg0, type);
8313 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8314 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8315 /* Convert fabs((double)float) into (double)fabsf(float). */
8316 else if (TREE_CODE (arg0) == NOP_EXPR
8317 && TREE_CODE (type) == REAL_TYPE)
8318 {
8319 tree targ0 = strip_float_extensions (arg0);
8320 if (targ0 != arg0)
8321 return fold_convert (type, fold_build1 (ABS_EXPR,
8322 TREE_TYPE (targ0),
8323 targ0));
8324 }
8325 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8326 else if (TREE_CODE (arg0) == ABS_EXPR)
8327 return arg0;
8328 else if (tree_expr_nonnegative_p (arg0))
8329 return arg0;
8330
8331 /* Strip sign ops from argument. */
8332 if (TREE_CODE (type) == REAL_TYPE)
8333 {
8334 tem = fold_strip_sign_ops (arg0);
8335 if (tem)
8336 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8337 }
8338 return NULL_TREE;
8339
8340 case CONJ_EXPR:
8341 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8342 return fold_convert (type, arg0);
8343 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8344 {
8345 tree itype = TREE_TYPE (type);
8346 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8347 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8348 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8349 }
8350 if (TREE_CODE (arg0) == COMPLEX_CST)
8351 {
8352 tree itype = TREE_TYPE (type);
8353 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8354 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8355 return build_complex (type, rpart, negate_expr (ipart));
8356 }
8357 if (TREE_CODE (arg0) == CONJ_EXPR)
8358 return fold_convert (type, TREE_OPERAND (arg0, 0));
8359 return NULL_TREE;
8360
8361 case BIT_NOT_EXPR:
8362 if (TREE_CODE (arg0) == INTEGER_CST)
8363 return fold_not_const (arg0, type);
8364 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8365 return TREE_OPERAND (arg0, 0);
8366 /* Convert ~ (-A) to A - 1. */
8367 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8368 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
8369 build_int_cst (type, 1));
8370 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8371 else if (INTEGRAL_TYPE_P (type)
8372 && ((TREE_CODE (arg0) == MINUS_EXPR
8373 && integer_onep (TREE_OPERAND (arg0, 1)))
8374 || (TREE_CODE (arg0) == PLUS_EXPR
8375 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8376 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8377 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8378 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8379 && (tem = fold_unary (BIT_NOT_EXPR, type,
8380 fold_convert (type,
8381 TREE_OPERAND (arg0, 0)))))
8382 return fold_build2 (BIT_XOR_EXPR, type, tem,
8383 fold_convert (type, TREE_OPERAND (arg0, 1)));
8384 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8385 && (tem = fold_unary (BIT_NOT_EXPR, type,
8386 fold_convert (type,
8387 TREE_OPERAND (arg0, 1)))))
8388 return fold_build2 (BIT_XOR_EXPR, type,
8389 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8390
8391 return NULL_TREE;
8392
8393 case TRUTH_NOT_EXPR:
8394 /* The argument to invert_truthvalue must have Boolean type. */
8395 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8396 arg0 = fold_convert (boolean_type_node, arg0);
8397
8398 /* Note that the operand of this must be an int
8399 and its values must be 0 or 1.
8400 ("true" is a fixed value perhaps depending on the language,
8401 but we don't handle values other than 1 correctly yet.) */
8402 tem = fold_truth_not_expr (arg0);
8403 if (!tem)
8404 return NULL_TREE;
8405 return fold_convert (type, tem);
8406
8407 case REALPART_EXPR:
8408 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8409 return fold_convert (type, arg0);
8410 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8411 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8412 TREE_OPERAND (arg0, 1));
8413 if (TREE_CODE (arg0) == COMPLEX_CST)
8414 return fold_convert (type, TREE_REALPART (arg0));
8415 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8416 {
8417 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8418 tem = fold_build2 (TREE_CODE (arg0), itype,
8419 fold_build1 (REALPART_EXPR, itype,
8420 TREE_OPERAND (arg0, 0)),
8421 fold_build1 (REALPART_EXPR, itype,
8422 TREE_OPERAND (arg0, 1)));
8423 return fold_convert (type, tem);
8424 }
8425 if (TREE_CODE (arg0) == CONJ_EXPR)
8426 {
8427 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8428 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8429 return fold_convert (type, tem);
8430 }
8431 if (TREE_CODE (arg0) == CALL_EXPR)
8432 {
8433 tree fn = get_callee_fndecl (arg0);
8434 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8435 switch (DECL_FUNCTION_CODE (fn))
8436 {
8437 CASE_FLT_FN (BUILT_IN_CEXPI):
8438 fn = mathfn_built_in (type, BUILT_IN_COS);
8439 if (fn)
8440 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8441 break;
8442
8443 default:
8444 break;
8445 }
8446 }
8447 return NULL_TREE;
8448
8449 case IMAGPART_EXPR:
8450 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8451 return fold_convert (type, integer_zero_node);
8452 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8453 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8454 TREE_OPERAND (arg0, 0));
8455 if (TREE_CODE (arg0) == COMPLEX_CST)
8456 return fold_convert (type, TREE_IMAGPART (arg0));
8457 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8458 {
8459 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8460 tem = fold_build2 (TREE_CODE (arg0), itype,
8461 fold_build1 (IMAGPART_EXPR, itype,
8462 TREE_OPERAND (arg0, 0)),
8463 fold_build1 (IMAGPART_EXPR, itype,
8464 TREE_OPERAND (arg0, 1)));
8465 return fold_convert (type, tem);
8466 }
8467 if (TREE_CODE (arg0) == CONJ_EXPR)
8468 {
8469 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8470 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8471 return fold_convert (type, negate_expr (tem));
8472 }
8473 if (TREE_CODE (arg0) == CALL_EXPR)
8474 {
8475 tree fn = get_callee_fndecl (arg0);
8476 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8477 switch (DECL_FUNCTION_CODE (fn))
8478 {
8479 CASE_FLT_FN (BUILT_IN_CEXPI):
8480 fn = mathfn_built_in (type, BUILT_IN_SIN);
8481 if (fn)
8482 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8483 break;
8484
8485 default:
8486 break;
8487 }
8488 }
8489 return NULL_TREE;
8490
8491 default:
8492 return NULL_TREE;
8493 } /* switch (code) */
8494 }
8495
8496 /* Fold a binary expression of code CODE and type TYPE with operands
8497 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8498 Return the folded expression if folding is successful. Otherwise,
8499 return NULL_TREE. */
8500
8501 static tree
8502 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8503 {
8504 enum tree_code compl_code;
8505
8506 if (code == MIN_EXPR)
8507 compl_code = MAX_EXPR;
8508 else if (code == MAX_EXPR)
8509 compl_code = MIN_EXPR;
8510 else
8511 gcc_unreachable ();
8512
8513 /* MIN (MAX (a, b), b) == b. */
8514 if (TREE_CODE (op0) == compl_code
8515 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8516 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8517
8518 /* MIN (MAX (b, a), b) == b. */
8519 if (TREE_CODE (op0) == compl_code
8520 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8521 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8522 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8523
8524 /* MIN (a, MAX (a, b)) == a. */
8525 if (TREE_CODE (op1) == compl_code
8526 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8527 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8528 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8529
8530 /* MIN (a, MAX (b, a)) == a. */
8531 if (TREE_CODE (op1) == compl_code
8532 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8533 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8534 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8535
8536 return NULL_TREE;
8537 }
8538
8539 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8540 by changing CODE to reduce the magnitude of constants involved in
8541 ARG0 of the comparison.
8542 Returns a canonicalized comparison tree if a simplification was
8543 possible, otherwise returns NULL_TREE.
8544 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8545 valid if signed overflow is undefined. */
8546
8547 static tree
8548 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8549 tree arg0, tree arg1,
8550 bool *strict_overflow_p)
8551 {
8552 enum tree_code code0 = TREE_CODE (arg0);
8553 tree t, cst0 = NULL_TREE;
8554 int sgn0;
8555 bool swap = false;
8556
8557 /* Match A +- CST code arg1 and CST code arg1. */
8558 if (!(((code0 == MINUS_EXPR
8559 || code0 == PLUS_EXPR)
8560 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8561 || code0 == INTEGER_CST))
8562 return NULL_TREE;
8563
8564 /* Identify the constant in arg0 and its sign. */
8565 if (code0 == INTEGER_CST)
8566 cst0 = arg0;
8567 else
8568 cst0 = TREE_OPERAND (arg0, 1);
8569 sgn0 = tree_int_cst_sgn (cst0);
8570
8571 /* Overflowed constants and zero will cause problems. */
8572 if (integer_zerop (cst0)
8573 || TREE_OVERFLOW (cst0))
8574 return NULL_TREE;
8575
8576 /* See if we can reduce the magnitude of the constant in
8577 arg0 by changing the comparison code. */
8578 if (code0 == INTEGER_CST)
8579 {
8580 /* CST <= arg1 -> CST-1 < arg1. */
8581 if (code == LE_EXPR && sgn0 == 1)
8582 code = LT_EXPR;
8583 /* -CST < arg1 -> -CST-1 <= arg1. */
8584 else if (code == LT_EXPR && sgn0 == -1)
8585 code = LE_EXPR;
8586 /* CST > arg1 -> CST-1 >= arg1. */
8587 else if (code == GT_EXPR && sgn0 == 1)
8588 code = GE_EXPR;
8589 /* -CST >= arg1 -> -CST-1 > arg1. */
8590 else if (code == GE_EXPR && sgn0 == -1)
8591 code = GT_EXPR;
8592 else
8593 return NULL_TREE;
8594 /* arg1 code' CST' might be more canonical. */
8595 swap = true;
8596 }
8597 else
8598 {
8599 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8600 if (code == LT_EXPR
8601 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8602 code = LE_EXPR;
8603 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8604 else if (code == GT_EXPR
8605 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8606 code = GE_EXPR;
8607 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8608 else if (code == LE_EXPR
8609 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8610 code = LT_EXPR;
8611 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8612 else if (code == GE_EXPR
8613 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8614 code = GT_EXPR;
8615 else
8616 return NULL_TREE;
8617 *strict_overflow_p = true;
8618 }
8619
8620 /* Now build the constant reduced in magnitude. */
8621 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8622 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8623 if (code0 != INTEGER_CST)
8624 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8625
8626 /* If swapping might yield to a more canonical form, do so. */
8627 if (swap)
8628 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8629 else
8630 return fold_build2 (code, type, t, arg1);
8631 }
8632
8633 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8634 overflow further. Try to decrease the magnitude of constants involved
8635 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8636 and put sole constants at the second argument position.
8637 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8638
8639 static tree
8640 maybe_canonicalize_comparison (enum tree_code code, tree type,
8641 tree arg0, tree arg1)
8642 {
8643 tree t;
8644 bool strict_overflow_p;
8645 const char * const warnmsg = G_("assuming signed overflow does not occur "
8646 "when reducing constant in comparison");
8647
8648 /* In principle pointers also have undefined overflow behavior,
8649 but that causes problems elsewhere. */
8650 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8651 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8652 return NULL_TREE;
8653
8654 /* Try canonicalization by simplifying arg0. */
8655 strict_overflow_p = false;
8656 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8657 &strict_overflow_p);
8658 if (t)
8659 {
8660 if (strict_overflow_p)
8661 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8662 return t;
8663 }
8664
8665 /* Try canonicalization by simplifying arg1 using the swapped
8666 comparison. */
8667 code = swap_tree_comparison (code);
8668 strict_overflow_p = false;
8669 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8670 &strict_overflow_p);
8671 if (t && strict_overflow_p)
8672 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8673 return t;
8674 }
8675
8676 /* Subroutine of fold_binary. This routine performs all of the
8677 transformations that are common to the equality/inequality
8678 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8679 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8680 fold_binary should call fold_binary. Fold a comparison with
8681 tree code CODE and type TYPE with operands OP0 and OP1. Return
8682 the folded comparison or NULL_TREE. */
8683
8684 static tree
8685 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8686 {
8687 tree arg0, arg1, tem;
8688
8689 arg0 = op0;
8690 arg1 = op1;
8691
8692 STRIP_SIGN_NOPS (arg0);
8693 STRIP_SIGN_NOPS (arg1);
8694
8695 tem = fold_relational_const (code, type, arg0, arg1);
8696 if (tem != NULL_TREE)
8697 return tem;
8698
8699 /* If one arg is a real or integer constant, put it last. */
8700 if (tree_swap_operands_p (arg0, arg1, true))
8701 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8702
8703 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8704 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8705 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8706 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8707 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8708 && (TREE_CODE (arg1) == INTEGER_CST
8709 && !TREE_OVERFLOW (arg1)))
8710 {
8711 tree const1 = TREE_OPERAND (arg0, 1);
8712 tree const2 = arg1;
8713 tree variable = TREE_OPERAND (arg0, 0);
8714 tree lhs;
8715 int lhs_add;
8716 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8717
8718 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8719 TREE_TYPE (arg1), const2, const1);
8720
8721 /* If the constant operation overflowed this can be
8722 simplified as a comparison against INT_MAX/INT_MIN. */
8723 if (TREE_CODE (lhs) == INTEGER_CST
8724 && TREE_OVERFLOW (lhs))
8725 {
8726 int const1_sgn = tree_int_cst_sgn (const1);
8727 enum tree_code code2 = code;
8728
8729 /* Get the sign of the constant on the lhs if the
8730 operation were VARIABLE + CONST1. */
8731 if (TREE_CODE (arg0) == MINUS_EXPR)
8732 const1_sgn = -const1_sgn;
8733
8734 /* The sign of the constant determines if we overflowed
8735 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8736 Canonicalize to the INT_MIN overflow by swapping the comparison
8737 if necessary. */
8738 if (const1_sgn == -1)
8739 code2 = swap_tree_comparison (code);
8740
8741 /* We now can look at the canonicalized case
8742 VARIABLE + 1 CODE2 INT_MIN
8743 and decide on the result. */
8744 if (code2 == LT_EXPR
8745 || code2 == LE_EXPR
8746 || code2 == EQ_EXPR)
8747 return omit_one_operand (type, boolean_false_node, variable);
8748 else if (code2 == NE_EXPR
8749 || code2 == GE_EXPR
8750 || code2 == GT_EXPR)
8751 return omit_one_operand (type, boolean_true_node, variable);
8752 }
8753
8754 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8755 && (TREE_CODE (lhs) != INTEGER_CST
8756 || !TREE_OVERFLOW (lhs)))
8757 {
8758 fold_overflow_warning (("assuming signed overflow does not occur "
8759 "when changing X +- C1 cmp C2 to "
8760 "X cmp C1 +- C2"),
8761 WARN_STRICT_OVERFLOW_COMPARISON);
8762 return fold_build2 (code, type, variable, lhs);
8763 }
8764 }
8765
8766 /* For comparisons of pointers we can decompose it to a compile time
8767 comparison of the base objects and the offsets into the object.
8768 This requires at least one operand being an ADDR_EXPR to do more
8769 than the operand_equal_p test below. */
8770 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8771 && (TREE_CODE (arg0) == ADDR_EXPR
8772 || TREE_CODE (arg1) == ADDR_EXPR))
8773 {
8774 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8775 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8776 enum machine_mode mode;
8777 int volatilep, unsignedp;
8778 bool indirect_base0 = false;
8779
8780 /* Get base and offset for the access. Strip ADDR_EXPR for
8781 get_inner_reference, but put it back by stripping INDIRECT_REF
8782 off the base object if possible. */
8783 base0 = arg0;
8784 if (TREE_CODE (arg0) == ADDR_EXPR)
8785 {
8786 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8787 &bitsize, &bitpos0, &offset0, &mode,
8788 &unsignedp, &volatilep, false);
8789 if (TREE_CODE (base0) == INDIRECT_REF)
8790 base0 = TREE_OPERAND (base0, 0);
8791 else
8792 indirect_base0 = true;
8793 }
8794
8795 base1 = arg1;
8796 if (TREE_CODE (arg1) == ADDR_EXPR)
8797 {
8798 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8799 &bitsize, &bitpos1, &offset1, &mode,
8800 &unsignedp, &volatilep, false);
8801 /* We have to make sure to have an indirect/non-indirect base1
8802 just the same as we did for base0. */
8803 if (TREE_CODE (base1) == INDIRECT_REF
8804 && !indirect_base0)
8805 base1 = TREE_OPERAND (base1, 0);
8806 else if (!indirect_base0)
8807 base1 = NULL_TREE;
8808 }
8809 else if (indirect_base0)
8810 base1 = NULL_TREE;
8811
8812 /* If we have equivalent bases we might be able to simplify. */
8813 if (base0 && base1
8814 && operand_equal_p (base0, base1, 0))
8815 {
8816 /* We can fold this expression to a constant if the non-constant
8817 offset parts are equal. */
8818 if (offset0 == offset1
8819 || (offset0 && offset1
8820 && operand_equal_p (offset0, offset1, 0)))
8821 {
8822 switch (code)
8823 {
8824 case EQ_EXPR:
8825 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8826 case NE_EXPR:
8827 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8828 case LT_EXPR:
8829 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8830 case LE_EXPR:
8831 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8832 case GE_EXPR:
8833 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8834 case GT_EXPR:
8835 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8836 default:;
8837 }
8838 }
8839 /* We can simplify the comparison to a comparison of the variable
8840 offset parts if the constant offset parts are equal.
8841 Be careful to use signed size type here because otherwise we
8842 mess with array offsets in the wrong way. This is possible
8843 because pointer arithmetic is restricted to retain within an
8844 object and overflow on pointer differences is undefined as of
8845 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8846 else if (bitpos0 == bitpos1)
8847 {
8848 tree signed_size_type_node;
8849 signed_size_type_node = signed_type_for (size_type_node);
8850
8851 /* By converting to signed size type we cover middle-end pointer
8852 arithmetic which operates on unsigned pointer types of size
8853 type size and ARRAY_REF offsets which are properly sign or
8854 zero extended from their type in case it is narrower than
8855 size type. */
8856 if (offset0 == NULL_TREE)
8857 offset0 = build_int_cst (signed_size_type_node, 0);
8858 else
8859 offset0 = fold_convert (signed_size_type_node, offset0);
8860 if (offset1 == NULL_TREE)
8861 offset1 = build_int_cst (signed_size_type_node, 0);
8862 else
8863 offset1 = fold_convert (signed_size_type_node, offset1);
8864
8865 return fold_build2 (code, type, offset0, offset1);
8866 }
8867 }
8868 }
8869
8870 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8871 same object, then we can fold this to a comparison of the two offsets in
8872 signed size type. This is possible because pointer arithmetic is
8873 restricted to retain within an object and overflow on pointer differences
8874 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8875
8876 We check flag_wrapv directly because pointers types are unsigned,
8877 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8878 normally what we want to avoid certain odd overflow cases, but
8879 not here. */
8880 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8881 && !flag_wrapv
8882 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8883 {
8884 tree base0, offset0, base1, offset1;
8885
8886 if (extract_array_ref (arg0, &base0, &offset0)
8887 && extract_array_ref (arg1, &base1, &offset1)
8888 && operand_equal_p (base0, base1, 0))
8889 {
8890 tree signed_size_type_node;
8891 signed_size_type_node = signed_type_for (size_type_node);
8892
8893 /* By converting to signed size type we cover middle-end pointer
8894 arithmetic which operates on unsigned pointer types of size
8895 type size and ARRAY_REF offsets which are properly sign or
8896 zero extended from their type in case it is narrower than
8897 size type. */
8898 if (offset0 == NULL_TREE)
8899 offset0 = build_int_cst (signed_size_type_node, 0);
8900 else
8901 offset0 = fold_convert (signed_size_type_node, offset0);
8902 if (offset1 == NULL_TREE)
8903 offset1 = build_int_cst (signed_size_type_node, 0);
8904 else
8905 offset1 = fold_convert (signed_size_type_node, offset1);
8906
8907 return fold_build2 (code, type, offset0, offset1);
8908 }
8909 }
8910
8911 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8912 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8913 the resulting offset is smaller in absolute value than the
8914 original one. */
8915 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8916 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8917 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8918 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8919 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8920 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8921 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8922 {
8923 tree const1 = TREE_OPERAND (arg0, 1);
8924 tree const2 = TREE_OPERAND (arg1, 1);
8925 tree variable1 = TREE_OPERAND (arg0, 0);
8926 tree variable2 = TREE_OPERAND (arg1, 0);
8927 tree cst;
8928 const char * const warnmsg = G_("assuming signed overflow does not "
8929 "occur when combining constants around "
8930 "a comparison");
8931
8932 /* Put the constant on the side where it doesn't overflow and is
8933 of lower absolute value than before. */
8934 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8935 ? MINUS_EXPR : PLUS_EXPR,
8936 const2, const1, 0);
8937 if (!TREE_OVERFLOW (cst)
8938 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8939 {
8940 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8941 return fold_build2 (code, type,
8942 variable1,
8943 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8944 variable2, cst));
8945 }
8946
8947 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8948 ? MINUS_EXPR : PLUS_EXPR,
8949 const1, const2, 0);
8950 if (!TREE_OVERFLOW (cst)
8951 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8952 {
8953 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8954 return fold_build2 (code, type,
8955 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8956 variable1, cst),
8957 variable2);
8958 }
8959 }
8960
8961 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8962 signed arithmetic case. That form is created by the compiler
8963 often enough for folding it to be of value. One example is in
8964 computing loop trip counts after Operator Strength Reduction. */
8965 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8966 && TREE_CODE (arg0) == MULT_EXPR
8967 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8968 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8969 && integer_zerop (arg1))
8970 {
8971 tree const1 = TREE_OPERAND (arg0, 1);
8972 tree const2 = arg1; /* zero */
8973 tree variable1 = TREE_OPERAND (arg0, 0);
8974 enum tree_code cmp_code = code;
8975
8976 gcc_assert (!integer_zerop (const1));
8977
8978 fold_overflow_warning (("assuming signed overflow does not occur when "
8979 "eliminating multiplication in comparison "
8980 "with zero"),
8981 WARN_STRICT_OVERFLOW_COMPARISON);
8982
8983 /* If const1 is negative we swap the sense of the comparison. */
8984 if (tree_int_cst_sgn (const1) < 0)
8985 cmp_code = swap_tree_comparison (cmp_code);
8986
8987 return fold_build2 (cmp_code, type, variable1, const2);
8988 }
8989
8990 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8991 if (tem)
8992 return tem;
8993
8994 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8995 {
8996 tree targ0 = strip_float_extensions (arg0);
8997 tree targ1 = strip_float_extensions (arg1);
8998 tree newtype = TREE_TYPE (targ0);
8999
9000 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9001 newtype = TREE_TYPE (targ1);
9002
9003 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9004 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9005 return fold_build2 (code, type, fold_convert (newtype, targ0),
9006 fold_convert (newtype, targ1));
9007
9008 /* (-a) CMP (-b) -> b CMP a */
9009 if (TREE_CODE (arg0) == NEGATE_EXPR
9010 && TREE_CODE (arg1) == NEGATE_EXPR)
9011 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9012 TREE_OPERAND (arg0, 0));
9013
9014 if (TREE_CODE (arg1) == REAL_CST)
9015 {
9016 REAL_VALUE_TYPE cst;
9017 cst = TREE_REAL_CST (arg1);
9018
9019 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9020 if (TREE_CODE (arg0) == NEGATE_EXPR)
9021 return fold_build2 (swap_tree_comparison (code), type,
9022 TREE_OPERAND (arg0, 0),
9023 build_real (TREE_TYPE (arg1),
9024 REAL_VALUE_NEGATE (cst)));
9025
9026 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9027 /* a CMP (-0) -> a CMP 0 */
9028 if (REAL_VALUE_MINUS_ZERO (cst))
9029 return fold_build2 (code, type, arg0,
9030 build_real (TREE_TYPE (arg1), dconst0));
9031
9032 /* x != NaN is always true, other ops are always false. */
9033 if (REAL_VALUE_ISNAN (cst)
9034 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9035 {
9036 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9037 return omit_one_operand (type, tem, arg0);
9038 }
9039
9040 /* Fold comparisons against infinity. */
9041 if (REAL_VALUE_ISINF (cst))
9042 {
9043 tem = fold_inf_compare (code, type, arg0, arg1);
9044 if (tem != NULL_TREE)
9045 return tem;
9046 }
9047 }
9048
9049 /* If this is a comparison of a real constant with a PLUS_EXPR
9050 or a MINUS_EXPR of a real constant, we can convert it into a
9051 comparison with a revised real constant as long as no overflow
9052 occurs when unsafe_math_optimizations are enabled. */
9053 if (flag_unsafe_math_optimizations
9054 && TREE_CODE (arg1) == REAL_CST
9055 && (TREE_CODE (arg0) == PLUS_EXPR
9056 || TREE_CODE (arg0) == MINUS_EXPR)
9057 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9058 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9059 ? MINUS_EXPR : PLUS_EXPR,
9060 arg1, TREE_OPERAND (arg0, 1), 0))
9061 && !TREE_OVERFLOW (tem))
9062 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9063
9064 /* Likewise, we can simplify a comparison of a real constant with
9065 a MINUS_EXPR whose first operand is also a real constant, i.e.
9066 (c1 - x) < c2 becomes x > c1-c2. */
9067 if (flag_unsafe_math_optimizations
9068 && TREE_CODE (arg1) == REAL_CST
9069 && TREE_CODE (arg0) == MINUS_EXPR
9070 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9071 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9072 arg1, 0))
9073 && !TREE_OVERFLOW (tem))
9074 return fold_build2 (swap_tree_comparison (code), type,
9075 TREE_OPERAND (arg0, 1), tem);
9076
9077 /* Fold comparisons against built-in math functions. */
9078 if (TREE_CODE (arg1) == REAL_CST
9079 && flag_unsafe_math_optimizations
9080 && ! flag_errno_math)
9081 {
9082 enum built_in_function fcode = builtin_mathfn_code (arg0);
9083
9084 if (fcode != END_BUILTINS)
9085 {
9086 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9087 if (tem != NULL_TREE)
9088 return tem;
9089 }
9090 }
9091 }
9092
9093 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9094 && (TREE_CODE (arg0) == NOP_EXPR
9095 || TREE_CODE (arg0) == CONVERT_EXPR))
9096 {
9097 /* If we are widening one operand of an integer comparison,
9098 see if the other operand is similarly being widened. Perhaps we
9099 can do the comparison in the narrower type. */
9100 tem = fold_widened_comparison (code, type, arg0, arg1);
9101 if (tem)
9102 return tem;
9103
9104 /* Or if we are changing signedness. */
9105 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9106 if (tem)
9107 return tem;
9108 }
9109
9110 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9111 constant, we can simplify it. */
9112 if (TREE_CODE (arg1) == INTEGER_CST
9113 && (TREE_CODE (arg0) == MIN_EXPR
9114 || TREE_CODE (arg0) == MAX_EXPR)
9115 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9116 {
9117 tem = optimize_minmax_comparison (code, type, op0, op1);
9118 if (tem)
9119 return tem;
9120 }
9121
9122 /* Simplify comparison of something with itself. (For IEEE
9123 floating-point, we can only do some of these simplifications.) */
9124 if (operand_equal_p (arg0, arg1, 0))
9125 {
9126 switch (code)
9127 {
9128 case EQ_EXPR:
9129 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9130 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9131 return constant_boolean_node (1, type);
9132 break;
9133
9134 case GE_EXPR:
9135 case LE_EXPR:
9136 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9137 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9138 return constant_boolean_node (1, type);
9139 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9140
9141 case NE_EXPR:
9142 /* For NE, we can only do this simplification if integer
9143 or we don't honor IEEE floating point NaNs. */
9144 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9145 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9146 break;
9147 /* ... fall through ... */
9148 case GT_EXPR:
9149 case LT_EXPR:
9150 return constant_boolean_node (0, type);
9151 default:
9152 gcc_unreachable ();
9153 }
9154 }
9155
9156 /* If we are comparing an expression that just has comparisons
9157 of two integer values, arithmetic expressions of those comparisons,
9158 and constants, we can simplify it. There are only three cases
9159 to check: the two values can either be equal, the first can be
9160 greater, or the second can be greater. Fold the expression for
9161 those three values. Since each value must be 0 or 1, we have
9162 eight possibilities, each of which corresponds to the constant 0
9163 or 1 or one of the six possible comparisons.
9164
9165 This handles common cases like (a > b) == 0 but also handles
9166 expressions like ((x > y) - (y > x)) > 0, which supposedly
9167 occur in macroized code. */
9168
9169 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9170 {
9171 tree cval1 = 0, cval2 = 0;
9172 int save_p = 0;
9173
9174 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9175 /* Don't handle degenerate cases here; they should already
9176 have been handled anyway. */
9177 && cval1 != 0 && cval2 != 0
9178 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9179 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9180 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9181 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9182 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9183 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9184 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9185 {
9186 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9187 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9188
9189 /* We can't just pass T to eval_subst in case cval1 or cval2
9190 was the same as ARG1. */
9191
9192 tree high_result
9193 = fold_build2 (code, type,
9194 eval_subst (arg0, cval1, maxval,
9195 cval2, minval),
9196 arg1);
9197 tree equal_result
9198 = fold_build2 (code, type,
9199 eval_subst (arg0, cval1, maxval,
9200 cval2, maxval),
9201 arg1);
9202 tree low_result
9203 = fold_build2 (code, type,
9204 eval_subst (arg0, cval1, minval,
9205 cval2, maxval),
9206 arg1);
9207
9208 /* All three of these results should be 0 or 1. Confirm they are.
9209 Then use those values to select the proper code to use. */
9210
9211 if (TREE_CODE (high_result) == INTEGER_CST
9212 && TREE_CODE (equal_result) == INTEGER_CST
9213 && TREE_CODE (low_result) == INTEGER_CST)
9214 {
9215 /* Make a 3-bit mask with the high-order bit being the
9216 value for `>', the next for '=', and the low for '<'. */
9217 switch ((integer_onep (high_result) * 4)
9218 + (integer_onep (equal_result) * 2)
9219 + integer_onep (low_result))
9220 {
9221 case 0:
9222 /* Always false. */
9223 return omit_one_operand (type, integer_zero_node, arg0);
9224 case 1:
9225 code = LT_EXPR;
9226 break;
9227 case 2:
9228 code = EQ_EXPR;
9229 break;
9230 case 3:
9231 code = LE_EXPR;
9232 break;
9233 case 4:
9234 code = GT_EXPR;
9235 break;
9236 case 5:
9237 code = NE_EXPR;
9238 break;
9239 case 6:
9240 code = GE_EXPR;
9241 break;
9242 case 7:
9243 /* Always true. */
9244 return omit_one_operand (type, integer_one_node, arg0);
9245 }
9246
9247 if (save_p)
9248 return save_expr (build2 (code, type, cval1, cval2));
9249 return fold_build2 (code, type, cval1, cval2);
9250 }
9251 }
9252 }
9253
9254 /* Fold a comparison of the address of COMPONENT_REFs with the same
9255 type and component to a comparison of the address of the base
9256 object. In short, &x->a OP &y->a to x OP y and
9257 &x->a OP &y.a to x OP &y */
9258 if (TREE_CODE (arg0) == ADDR_EXPR
9259 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9260 && TREE_CODE (arg1) == ADDR_EXPR
9261 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9262 {
9263 tree cref0 = TREE_OPERAND (arg0, 0);
9264 tree cref1 = TREE_OPERAND (arg1, 0);
9265 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9266 {
9267 tree op0 = TREE_OPERAND (cref0, 0);
9268 tree op1 = TREE_OPERAND (cref1, 0);
9269 return fold_build2 (code, type,
9270 fold_addr_expr (op0),
9271 fold_addr_expr (op1));
9272 }
9273 }
9274
9275 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9276 into a single range test. */
9277 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9278 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9279 && TREE_CODE (arg1) == INTEGER_CST
9280 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9281 && !integer_zerop (TREE_OPERAND (arg0, 1))
9282 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9283 && !TREE_OVERFLOW (arg1))
9284 {
9285 tem = fold_div_compare (code, type, arg0, arg1);
9286 if (tem != NULL_TREE)
9287 return tem;
9288 }
9289
9290 /* Fold ~X op ~Y as Y op X. */
9291 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9292 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9293 {
9294 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9295 return fold_build2 (code, type,
9296 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9297 TREE_OPERAND (arg0, 0));
9298 }
9299
9300 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9301 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9302 && TREE_CODE (arg1) == INTEGER_CST)
9303 {
9304 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9305 return fold_build2 (swap_tree_comparison (code), type,
9306 TREE_OPERAND (arg0, 0),
9307 fold_build1 (BIT_NOT_EXPR, cmp_type,
9308 fold_convert (cmp_type, arg1)));
9309 }
9310
9311 return NULL_TREE;
9312 }
9313
9314
9315 /* Subroutine of fold_binary. Optimize complex multiplications of the
9316 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9317 argument EXPR represents the expression "z" of type TYPE. */
9318
9319 static tree
9320 fold_mult_zconjz (tree type, tree expr)
9321 {
9322 tree itype = TREE_TYPE (type);
9323 tree rpart, ipart, tem;
9324
9325 if (TREE_CODE (expr) == COMPLEX_EXPR)
9326 {
9327 rpart = TREE_OPERAND (expr, 0);
9328 ipart = TREE_OPERAND (expr, 1);
9329 }
9330 else if (TREE_CODE (expr) == COMPLEX_CST)
9331 {
9332 rpart = TREE_REALPART (expr);
9333 ipart = TREE_IMAGPART (expr);
9334 }
9335 else
9336 {
9337 expr = save_expr (expr);
9338 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9339 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9340 }
9341
9342 rpart = save_expr (rpart);
9343 ipart = save_expr (ipart);
9344 tem = fold_build2 (PLUS_EXPR, itype,
9345 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9346 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9347 return fold_build2 (COMPLEX_EXPR, type, tem,
9348 fold_convert (itype, integer_zero_node));
9349 }
9350
9351
9352 /* Fold a binary expression of code CODE and type TYPE with operands
9353 OP0 and OP1. Return the folded expression if folding is
9354 successful. Otherwise, return NULL_TREE. */
9355
9356 tree
9357 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9358 {
9359 enum tree_code_class kind = TREE_CODE_CLASS (code);
9360 tree arg0, arg1, tem;
9361 tree t1 = NULL_TREE;
9362 bool strict_overflow_p;
9363
9364 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9365 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9366 && TREE_CODE_LENGTH (code) == 2
9367 && op0 != NULL_TREE
9368 && op1 != NULL_TREE);
9369
9370 arg0 = op0;
9371 arg1 = op1;
9372
9373 /* Strip any conversions that don't change the mode. This is
9374 safe for every expression, except for a comparison expression
9375 because its signedness is derived from its operands. So, in
9376 the latter case, only strip conversions that don't change the
9377 signedness.
9378
9379 Note that this is done as an internal manipulation within the
9380 constant folder, in order to find the simplest representation
9381 of the arguments so that their form can be studied. In any
9382 cases, the appropriate type conversions should be put back in
9383 the tree that will get out of the constant folder. */
9384
9385 if (kind == tcc_comparison)
9386 {
9387 STRIP_SIGN_NOPS (arg0);
9388 STRIP_SIGN_NOPS (arg1);
9389 }
9390 else
9391 {
9392 STRIP_NOPS (arg0);
9393 STRIP_NOPS (arg1);
9394 }
9395
9396 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9397 constant but we can't do arithmetic on them. */
9398 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9399 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9400 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9401 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9402 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9403 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9404 {
9405 if (kind == tcc_binary)
9406 {
9407 /* Make sure type and arg0 have the same saturating flag. */
9408 gcc_assert (TYPE_SATURATING (type)
9409 == TYPE_SATURATING (TREE_TYPE (arg0)));
9410 tem = const_binop (code, arg0, arg1, 0);
9411 }
9412 else if (kind == tcc_comparison)
9413 tem = fold_relational_const (code, type, arg0, arg1);
9414 else
9415 tem = NULL_TREE;
9416
9417 if (tem != NULL_TREE)
9418 {
9419 if (TREE_TYPE (tem) != type)
9420 tem = fold_convert (type, tem);
9421 return tem;
9422 }
9423 }
9424
9425 /* If this is a commutative operation, and ARG0 is a constant, move it
9426 to ARG1 to reduce the number of tests below. */
9427 if (commutative_tree_code (code)
9428 && tree_swap_operands_p (arg0, arg1, true))
9429 return fold_build2 (code, type, op1, op0);
9430
9431 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9432
9433 First check for cases where an arithmetic operation is applied to a
9434 compound, conditional, or comparison operation. Push the arithmetic
9435 operation inside the compound or conditional to see if any folding
9436 can then be done. Convert comparison to conditional for this purpose.
9437 The also optimizes non-constant cases that used to be done in
9438 expand_expr.
9439
9440 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9441 one of the operands is a comparison and the other is a comparison, a
9442 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9443 code below would make the expression more complex. Change it to a
9444 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9445 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9446
9447 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9448 || code == EQ_EXPR || code == NE_EXPR)
9449 && ((truth_value_p (TREE_CODE (arg0))
9450 && (truth_value_p (TREE_CODE (arg1))
9451 || (TREE_CODE (arg1) == BIT_AND_EXPR
9452 && integer_onep (TREE_OPERAND (arg1, 1)))))
9453 || (truth_value_p (TREE_CODE (arg1))
9454 && (truth_value_p (TREE_CODE (arg0))
9455 || (TREE_CODE (arg0) == BIT_AND_EXPR
9456 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9457 {
9458 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9459 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9460 : TRUTH_XOR_EXPR,
9461 boolean_type_node,
9462 fold_convert (boolean_type_node, arg0),
9463 fold_convert (boolean_type_node, arg1));
9464
9465 if (code == EQ_EXPR)
9466 tem = invert_truthvalue (tem);
9467
9468 return fold_convert (type, tem);
9469 }
9470
9471 if (TREE_CODE_CLASS (code) == tcc_binary
9472 || TREE_CODE_CLASS (code) == tcc_comparison)
9473 {
9474 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9475 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9476 fold_build2 (code, type,
9477 TREE_OPERAND (arg0, 1), op1));
9478 if (TREE_CODE (arg1) == COMPOUND_EXPR
9479 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9480 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9481 fold_build2 (code, type,
9482 op0, TREE_OPERAND (arg1, 1)));
9483
9484 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9485 {
9486 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9487 arg0, arg1,
9488 /*cond_first_p=*/1);
9489 if (tem != NULL_TREE)
9490 return tem;
9491 }
9492
9493 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9494 {
9495 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9496 arg1, arg0,
9497 /*cond_first_p=*/0);
9498 if (tem != NULL_TREE)
9499 return tem;
9500 }
9501 }
9502
9503 switch (code)
9504 {
9505 case POINTER_PLUS_EXPR:
9506 /* 0 +p index -> (type)index */
9507 if (integer_zerop (arg0))
9508 return non_lvalue (fold_convert (type, arg1));
9509
9510 /* PTR +p 0 -> PTR */
9511 if (integer_zerop (arg1))
9512 return non_lvalue (fold_convert (type, arg0));
9513
9514 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9515 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9516 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9517 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9518 fold_convert (sizetype, arg1),
9519 fold_convert (sizetype, arg0)));
9520
9521 /* index +p PTR -> PTR +p index */
9522 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9523 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9524 return fold_build2 (POINTER_PLUS_EXPR, type,
9525 fold_convert (type, arg1), fold_convert (sizetype, arg0));
9526
9527 /* (PTR +p B) +p A -> PTR +p (B + A) */
9528 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9529 {
9530 tree inner;
9531 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9532 tree arg00 = TREE_OPERAND (arg0, 0);
9533 inner = fold_build2 (PLUS_EXPR, sizetype, arg01, fold_convert (sizetype, arg1));
9534 return fold_build2 (POINTER_PLUS_EXPR, type, arg00, inner);
9535 }
9536
9537 /* PTR_CST +p CST -> CST1 */
9538 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9539 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9540
9541 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9542 of the array. Loop optimizer sometimes produce this type of
9543 expressions. */
9544 if (TREE_CODE (arg0) == ADDR_EXPR)
9545 {
9546 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9547 if (tem)
9548 return fold_convert (type, tem);
9549 }
9550
9551 return NULL_TREE;
9552 case PLUS_EXPR:
9553 /* PTR + INT -> (INT)(PTR p+ INT) */
9554 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9555 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9556 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9557 TREE_TYPE (arg0),
9558 arg0,
9559 fold_convert (sizetype, arg1)));
9560 /* INT + PTR -> (INT)(PTR p+ INT) */
9561 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9562 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9563 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9564 TREE_TYPE (arg1),
9565 arg1,
9566 fold_convert (sizetype, arg0)));
9567 /* A + (-B) -> A - B */
9568 if (TREE_CODE (arg1) == NEGATE_EXPR)
9569 return fold_build2 (MINUS_EXPR, type,
9570 fold_convert (type, arg0),
9571 fold_convert (type, TREE_OPERAND (arg1, 0)));
9572 /* (-A) + B -> B - A */
9573 if (TREE_CODE (arg0) == NEGATE_EXPR
9574 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9575 return fold_build2 (MINUS_EXPR, type,
9576 fold_convert (type, arg1),
9577 fold_convert (type, TREE_OPERAND (arg0, 0)));
9578
9579 if (INTEGRAL_TYPE_P (type))
9580 {
9581 /* Convert ~A + 1 to -A. */
9582 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9583 && integer_onep (arg1))
9584 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9585
9586 /* ~X + X is -1. */
9587 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9588 && !TYPE_OVERFLOW_TRAPS (type))
9589 {
9590 tree tem = TREE_OPERAND (arg0, 0);
9591
9592 STRIP_NOPS (tem);
9593 if (operand_equal_p (tem, arg1, 0))
9594 {
9595 t1 = build_int_cst_type (type, -1);
9596 return omit_one_operand (type, t1, arg1);
9597 }
9598 }
9599
9600 /* X + ~X is -1. */
9601 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9602 && !TYPE_OVERFLOW_TRAPS (type))
9603 {
9604 tree tem = TREE_OPERAND (arg1, 0);
9605
9606 STRIP_NOPS (tem);
9607 if (operand_equal_p (arg0, tem, 0))
9608 {
9609 t1 = build_int_cst_type (type, -1);
9610 return omit_one_operand (type, t1, arg0);
9611 }
9612 }
9613 }
9614
9615 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9616 same or one. Make sure type is not saturating. */
9617 if ((TREE_CODE (arg0) == MULT_EXPR
9618 || TREE_CODE (arg1) == MULT_EXPR)
9619 && !TYPE_SATURATING (type)
9620 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9621 {
9622 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9623 if (tem)
9624 return tem;
9625 }
9626
9627 if (! FLOAT_TYPE_P (type))
9628 {
9629 if (integer_zerop (arg1))
9630 return non_lvalue (fold_convert (type, arg0));
9631
9632 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9633 with a constant, and the two constants have no bits in common,
9634 we should treat this as a BIT_IOR_EXPR since this may produce more
9635 simplifications. */
9636 if (TREE_CODE (arg0) == BIT_AND_EXPR
9637 && TREE_CODE (arg1) == BIT_AND_EXPR
9638 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9639 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9640 && integer_zerop (const_binop (BIT_AND_EXPR,
9641 TREE_OPERAND (arg0, 1),
9642 TREE_OPERAND (arg1, 1), 0)))
9643 {
9644 code = BIT_IOR_EXPR;
9645 goto bit_ior;
9646 }
9647
9648 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9649 (plus (plus (mult) (mult)) (foo)) so that we can
9650 take advantage of the factoring cases below. */
9651 if (((TREE_CODE (arg0) == PLUS_EXPR
9652 || TREE_CODE (arg0) == MINUS_EXPR)
9653 && TREE_CODE (arg1) == MULT_EXPR)
9654 || ((TREE_CODE (arg1) == PLUS_EXPR
9655 || TREE_CODE (arg1) == MINUS_EXPR)
9656 && TREE_CODE (arg0) == MULT_EXPR))
9657 {
9658 tree parg0, parg1, parg, marg;
9659 enum tree_code pcode;
9660
9661 if (TREE_CODE (arg1) == MULT_EXPR)
9662 parg = arg0, marg = arg1;
9663 else
9664 parg = arg1, marg = arg0;
9665 pcode = TREE_CODE (parg);
9666 parg0 = TREE_OPERAND (parg, 0);
9667 parg1 = TREE_OPERAND (parg, 1);
9668 STRIP_NOPS (parg0);
9669 STRIP_NOPS (parg1);
9670
9671 if (TREE_CODE (parg0) == MULT_EXPR
9672 && TREE_CODE (parg1) != MULT_EXPR)
9673 return fold_build2 (pcode, type,
9674 fold_build2 (PLUS_EXPR, type,
9675 fold_convert (type, parg0),
9676 fold_convert (type, marg)),
9677 fold_convert (type, parg1));
9678 if (TREE_CODE (parg0) != MULT_EXPR
9679 && TREE_CODE (parg1) == MULT_EXPR)
9680 return fold_build2 (PLUS_EXPR, type,
9681 fold_convert (type, parg0),
9682 fold_build2 (pcode, type,
9683 fold_convert (type, marg),
9684 fold_convert (type,
9685 parg1)));
9686 }
9687 }
9688 else
9689 {
9690 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9691 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9692 return non_lvalue (fold_convert (type, arg0));
9693
9694 /* Likewise if the operands are reversed. */
9695 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9696 return non_lvalue (fold_convert (type, arg1));
9697
9698 /* Convert X + -C into X - C. */
9699 if (TREE_CODE (arg1) == REAL_CST
9700 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9701 {
9702 tem = fold_negate_const (arg1, type);
9703 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9704 return fold_build2 (MINUS_EXPR, type,
9705 fold_convert (type, arg0),
9706 fold_convert (type, tem));
9707 }
9708
9709 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9710 to __complex__ ( x, y ). This is not the same for SNaNs or
9711 if signed zeros are involved. */
9712 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9713 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9714 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9715 {
9716 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9717 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9718 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9719 bool arg0rz = false, arg0iz = false;
9720 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9721 || (arg0i && (arg0iz = real_zerop (arg0i))))
9722 {
9723 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9724 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9725 if (arg0rz && arg1i && real_zerop (arg1i))
9726 {
9727 tree rp = arg1r ? arg1r
9728 : build1 (REALPART_EXPR, rtype, arg1);
9729 tree ip = arg0i ? arg0i
9730 : build1 (IMAGPART_EXPR, rtype, arg0);
9731 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9732 }
9733 else if (arg0iz && arg1r && real_zerop (arg1r))
9734 {
9735 tree rp = arg0r ? arg0r
9736 : build1 (REALPART_EXPR, rtype, arg0);
9737 tree ip = arg1i ? arg1i
9738 : build1 (IMAGPART_EXPR, rtype, arg1);
9739 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9740 }
9741 }
9742 }
9743
9744 if (flag_unsafe_math_optimizations
9745 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9746 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9747 && (tem = distribute_real_division (code, type, arg0, arg1)))
9748 return tem;
9749
9750 /* Convert x+x into x*2.0. */
9751 if (operand_equal_p (arg0, arg1, 0)
9752 && SCALAR_FLOAT_TYPE_P (type))
9753 return fold_build2 (MULT_EXPR, type, arg0,
9754 build_real (type, dconst2));
9755
9756 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9757 if (flag_unsafe_math_optimizations
9758 && TREE_CODE (arg1) == PLUS_EXPR
9759 && TREE_CODE (arg0) != MULT_EXPR)
9760 {
9761 tree tree10 = TREE_OPERAND (arg1, 0);
9762 tree tree11 = TREE_OPERAND (arg1, 1);
9763 if (TREE_CODE (tree11) == MULT_EXPR
9764 && TREE_CODE (tree10) == MULT_EXPR)
9765 {
9766 tree tree0;
9767 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9768 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9769 }
9770 }
9771 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
9772 if (flag_unsafe_math_optimizations
9773 && TREE_CODE (arg0) == PLUS_EXPR
9774 && TREE_CODE (arg1) != MULT_EXPR)
9775 {
9776 tree tree00 = TREE_OPERAND (arg0, 0);
9777 tree tree01 = TREE_OPERAND (arg0, 1);
9778 if (TREE_CODE (tree01) == MULT_EXPR
9779 && TREE_CODE (tree00) == MULT_EXPR)
9780 {
9781 tree tree0;
9782 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9783 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9784 }
9785 }
9786 }
9787
9788 bit_rotate:
9789 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9790 is a rotate of A by C1 bits. */
9791 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9792 is a rotate of A by B bits. */
9793 {
9794 enum tree_code code0, code1;
9795 code0 = TREE_CODE (arg0);
9796 code1 = TREE_CODE (arg1);
9797 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9798 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9799 && operand_equal_p (TREE_OPERAND (arg0, 0),
9800 TREE_OPERAND (arg1, 0), 0)
9801 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9802 {
9803 tree tree01, tree11;
9804 enum tree_code code01, code11;
9805
9806 tree01 = TREE_OPERAND (arg0, 1);
9807 tree11 = TREE_OPERAND (arg1, 1);
9808 STRIP_NOPS (tree01);
9809 STRIP_NOPS (tree11);
9810 code01 = TREE_CODE (tree01);
9811 code11 = TREE_CODE (tree11);
9812 if (code01 == INTEGER_CST
9813 && code11 == INTEGER_CST
9814 && TREE_INT_CST_HIGH (tree01) == 0
9815 && TREE_INT_CST_HIGH (tree11) == 0
9816 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9817 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9818 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9819 code0 == LSHIFT_EXPR ? tree01 : tree11);
9820 else if (code11 == MINUS_EXPR)
9821 {
9822 tree tree110, tree111;
9823 tree110 = TREE_OPERAND (tree11, 0);
9824 tree111 = TREE_OPERAND (tree11, 1);
9825 STRIP_NOPS (tree110);
9826 STRIP_NOPS (tree111);
9827 if (TREE_CODE (tree110) == INTEGER_CST
9828 && 0 == compare_tree_int (tree110,
9829 TYPE_PRECISION
9830 (TREE_TYPE (TREE_OPERAND
9831 (arg0, 0))))
9832 && operand_equal_p (tree01, tree111, 0))
9833 return build2 ((code0 == LSHIFT_EXPR
9834 ? LROTATE_EXPR
9835 : RROTATE_EXPR),
9836 type, TREE_OPERAND (arg0, 0), tree01);
9837 }
9838 else if (code01 == MINUS_EXPR)
9839 {
9840 tree tree010, tree011;
9841 tree010 = TREE_OPERAND (tree01, 0);
9842 tree011 = TREE_OPERAND (tree01, 1);
9843 STRIP_NOPS (tree010);
9844 STRIP_NOPS (tree011);
9845 if (TREE_CODE (tree010) == INTEGER_CST
9846 && 0 == compare_tree_int (tree010,
9847 TYPE_PRECISION
9848 (TREE_TYPE (TREE_OPERAND
9849 (arg0, 0))))
9850 && operand_equal_p (tree11, tree011, 0))
9851 return build2 ((code0 != LSHIFT_EXPR
9852 ? LROTATE_EXPR
9853 : RROTATE_EXPR),
9854 type, TREE_OPERAND (arg0, 0), tree11);
9855 }
9856 }
9857 }
9858
9859 associate:
9860 /* In most languages, can't associate operations on floats through
9861 parentheses. Rather than remember where the parentheses were, we
9862 don't associate floats at all, unless the user has specified
9863 -funsafe-math-optimizations.
9864 And, we need to make sure type is not saturating. */
9865
9866 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9867 && !TYPE_SATURATING (type))
9868 {
9869 tree var0, con0, lit0, minus_lit0;
9870 tree var1, con1, lit1, minus_lit1;
9871 bool ok = true;
9872
9873 /* Split both trees into variables, constants, and literals. Then
9874 associate each group together, the constants with literals,
9875 then the result with variables. This increases the chances of
9876 literals being recombined later and of generating relocatable
9877 expressions for the sum of a constant and literal. */
9878 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9879 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9880 code == MINUS_EXPR);
9881
9882 /* With undefined overflow we can only associate constants
9883 with one variable. */
9884 if ((POINTER_TYPE_P (type)
9885 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9886 && var0 && var1)
9887 {
9888 tree tmp0 = var0;
9889 tree tmp1 = var1;
9890
9891 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9892 tmp0 = TREE_OPERAND (tmp0, 0);
9893 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9894 tmp1 = TREE_OPERAND (tmp1, 0);
9895 /* The only case we can still associate with two variables
9896 is if they are the same, modulo negation. */
9897 if (!operand_equal_p (tmp0, tmp1, 0))
9898 ok = false;
9899 }
9900
9901 /* Only do something if we found more than two objects. Otherwise,
9902 nothing has changed and we risk infinite recursion. */
9903 if (ok
9904 && (2 < ((var0 != 0) + (var1 != 0)
9905 + (con0 != 0) + (con1 != 0)
9906 + (lit0 != 0) + (lit1 != 0)
9907 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9908 {
9909 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9910 if (code == MINUS_EXPR)
9911 code = PLUS_EXPR;
9912
9913 var0 = associate_trees (var0, var1, code, type);
9914 con0 = associate_trees (con0, con1, code, type);
9915 lit0 = associate_trees (lit0, lit1, code, type);
9916 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9917
9918 /* Preserve the MINUS_EXPR if the negative part of the literal is
9919 greater than the positive part. Otherwise, the multiplicative
9920 folding code (i.e extract_muldiv) may be fooled in case
9921 unsigned constants are subtracted, like in the following
9922 example: ((X*2 + 4) - 8U)/2. */
9923 if (minus_lit0 && lit0)
9924 {
9925 if (TREE_CODE (lit0) == INTEGER_CST
9926 && TREE_CODE (minus_lit0) == INTEGER_CST
9927 && tree_int_cst_lt (lit0, minus_lit0))
9928 {
9929 minus_lit0 = associate_trees (minus_lit0, lit0,
9930 MINUS_EXPR, type);
9931 lit0 = 0;
9932 }
9933 else
9934 {
9935 lit0 = associate_trees (lit0, minus_lit0,
9936 MINUS_EXPR, type);
9937 minus_lit0 = 0;
9938 }
9939 }
9940 if (minus_lit0)
9941 {
9942 if (con0 == 0)
9943 return fold_convert (type,
9944 associate_trees (var0, minus_lit0,
9945 MINUS_EXPR, type));
9946 else
9947 {
9948 con0 = associate_trees (con0, minus_lit0,
9949 MINUS_EXPR, type);
9950 return fold_convert (type,
9951 associate_trees (var0, con0,
9952 PLUS_EXPR, type));
9953 }
9954 }
9955
9956 con0 = associate_trees (con0, lit0, code, type);
9957 return fold_convert (type, associate_trees (var0, con0,
9958 code, type));
9959 }
9960 }
9961
9962 return NULL_TREE;
9963
9964 case MINUS_EXPR:
9965 /* Pointer simplifications for subtraction, simple reassociations. */
9966 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9967 {
9968 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9969 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9970 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9971 {
9972 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9973 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9974 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9975 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9976 return fold_build2 (PLUS_EXPR, type,
9977 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9978 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9979 }
9980 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9981 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9982 {
9983 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9984 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9985 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9986 if (tmp)
9987 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9988 }
9989 }
9990 /* A - (-B) -> A + B */
9991 if (TREE_CODE (arg1) == NEGATE_EXPR)
9992 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9993 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9994 if (TREE_CODE (arg0) == NEGATE_EXPR
9995 && (FLOAT_TYPE_P (type)
9996 || INTEGRAL_TYPE_P (type))
9997 && negate_expr_p (arg1)
9998 && reorder_operands_p (arg0, arg1))
9999 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
10000 TREE_OPERAND (arg0, 0));
10001 /* Convert -A - 1 to ~A. */
10002 if (INTEGRAL_TYPE_P (type)
10003 && TREE_CODE (arg0) == NEGATE_EXPR
10004 && integer_onep (arg1)
10005 && !TYPE_OVERFLOW_TRAPS (type))
10006 return fold_build1 (BIT_NOT_EXPR, type,
10007 fold_convert (type, TREE_OPERAND (arg0, 0)));
10008
10009 /* Convert -1 - A to ~A. */
10010 if (INTEGRAL_TYPE_P (type)
10011 && integer_all_onesp (arg0))
10012 return fold_build1 (BIT_NOT_EXPR, type, op1);
10013
10014 if (! FLOAT_TYPE_P (type))
10015 {
10016 if (integer_zerop (arg0))
10017 return negate_expr (fold_convert (type, arg1));
10018 if (integer_zerop (arg1))
10019 return non_lvalue (fold_convert (type, arg0));
10020
10021 /* Fold A - (A & B) into ~B & A. */
10022 if (!TREE_SIDE_EFFECTS (arg0)
10023 && TREE_CODE (arg1) == BIT_AND_EXPR)
10024 {
10025 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10026 {
10027 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10028 return fold_build2 (BIT_AND_EXPR, type,
10029 fold_build1 (BIT_NOT_EXPR, type, arg10),
10030 fold_convert (type, arg0));
10031 }
10032 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10033 {
10034 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10035 return fold_build2 (BIT_AND_EXPR, type,
10036 fold_build1 (BIT_NOT_EXPR, type, arg11),
10037 fold_convert (type, arg0));
10038 }
10039 }
10040
10041 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10042 any power of 2 minus 1. */
10043 if (TREE_CODE (arg0) == BIT_AND_EXPR
10044 && TREE_CODE (arg1) == BIT_AND_EXPR
10045 && operand_equal_p (TREE_OPERAND (arg0, 0),
10046 TREE_OPERAND (arg1, 0), 0))
10047 {
10048 tree mask0 = TREE_OPERAND (arg0, 1);
10049 tree mask1 = TREE_OPERAND (arg1, 1);
10050 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10051
10052 if (operand_equal_p (tem, mask1, 0))
10053 {
10054 tem = fold_build2 (BIT_XOR_EXPR, type,
10055 TREE_OPERAND (arg0, 0), mask1);
10056 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10057 }
10058 }
10059 }
10060
10061 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10062 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10063 return non_lvalue (fold_convert (type, arg0));
10064
10065 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10066 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10067 (-ARG1 + ARG0) reduces to -ARG1. */
10068 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10069 return negate_expr (fold_convert (type, arg1));
10070
10071 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10072 __complex__ ( x, -y ). This is not the same for SNaNs or if
10073 signed zeros are involved. */
10074 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10075 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10076 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10077 {
10078 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10079 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10080 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10081 bool arg0rz = false, arg0iz = false;
10082 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10083 || (arg0i && (arg0iz = real_zerop (arg0i))))
10084 {
10085 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10086 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10087 if (arg0rz && arg1i && real_zerop (arg1i))
10088 {
10089 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10090 arg1r ? arg1r
10091 : build1 (REALPART_EXPR, rtype, arg1));
10092 tree ip = arg0i ? arg0i
10093 : build1 (IMAGPART_EXPR, rtype, arg0);
10094 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10095 }
10096 else if (arg0iz && arg1r && real_zerop (arg1r))
10097 {
10098 tree rp = arg0r ? arg0r
10099 : build1 (REALPART_EXPR, rtype, arg0);
10100 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10101 arg1i ? arg1i
10102 : build1 (IMAGPART_EXPR, rtype, arg1));
10103 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10104 }
10105 }
10106 }
10107
10108 /* Fold &x - &x. This can happen from &x.foo - &x.
10109 This is unsafe for certain floats even in non-IEEE formats.
10110 In IEEE, it is unsafe because it does wrong for NaNs.
10111 Also note that operand_equal_p is always false if an operand
10112 is volatile. */
10113
10114 if ((! FLOAT_TYPE_P (type)
10115 || (flag_unsafe_math_optimizations
10116 && !HONOR_NANS (TYPE_MODE (type))
10117 && !HONOR_INFINITIES (TYPE_MODE (type))))
10118 && operand_equal_p (arg0, arg1, 0))
10119 return fold_convert (type, integer_zero_node);
10120
10121 /* A - B -> A + (-B) if B is easily negatable. */
10122 if (negate_expr_p (arg1)
10123 && ((FLOAT_TYPE_P (type)
10124 /* Avoid this transformation if B is a positive REAL_CST. */
10125 && (TREE_CODE (arg1) != REAL_CST
10126 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10127 || INTEGRAL_TYPE_P (type)))
10128 return fold_build2 (PLUS_EXPR, type,
10129 fold_convert (type, arg0),
10130 fold_convert (type, negate_expr (arg1)));
10131
10132 /* Try folding difference of addresses. */
10133 {
10134 HOST_WIDE_INT diff;
10135
10136 if ((TREE_CODE (arg0) == ADDR_EXPR
10137 || TREE_CODE (arg1) == ADDR_EXPR)
10138 && ptr_difference_const (arg0, arg1, &diff))
10139 return build_int_cst_type (type, diff);
10140 }
10141
10142 /* Fold &a[i] - &a[j] to i-j. */
10143 if (TREE_CODE (arg0) == ADDR_EXPR
10144 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10145 && TREE_CODE (arg1) == ADDR_EXPR
10146 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10147 {
10148 tree aref0 = TREE_OPERAND (arg0, 0);
10149 tree aref1 = TREE_OPERAND (arg1, 0);
10150 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10151 TREE_OPERAND (aref1, 0), 0))
10152 {
10153 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10154 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10155 tree esz = array_ref_element_size (aref0);
10156 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10157 return fold_build2 (MULT_EXPR, type, diff,
10158 fold_convert (type, esz));
10159
10160 }
10161 }
10162
10163 if (flag_unsafe_math_optimizations
10164 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10165 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10166 && (tem = distribute_real_division (code, type, arg0, arg1)))
10167 return tem;
10168
10169 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10170 same or one. Make sure type is not saturating. */
10171 if ((TREE_CODE (arg0) == MULT_EXPR
10172 || TREE_CODE (arg1) == MULT_EXPR)
10173 && !TYPE_SATURATING (type)
10174 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
10175 {
10176 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10177 if (tem)
10178 return tem;
10179 }
10180
10181 goto associate;
10182
10183 case MULT_EXPR:
10184 /* (-A) * (-B) -> A * B */
10185 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10186 return fold_build2 (MULT_EXPR, type,
10187 fold_convert (type, TREE_OPERAND (arg0, 0)),
10188 fold_convert (type, negate_expr (arg1)));
10189 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10190 return fold_build2 (MULT_EXPR, type,
10191 fold_convert (type, negate_expr (arg0)),
10192 fold_convert (type, TREE_OPERAND (arg1, 0)));
10193
10194 if (! FLOAT_TYPE_P (type))
10195 {
10196 if (integer_zerop (arg1))
10197 return omit_one_operand (type, arg1, arg0);
10198 if (integer_onep (arg1))
10199 return non_lvalue (fold_convert (type, arg0));
10200 /* Transform x * -1 into -x. */
10201 if (integer_all_onesp (arg1))
10202 return fold_convert (type, negate_expr (arg0));
10203 /* Transform x * -C into -x * C if x is easily negatable. */
10204 if (TREE_CODE (arg1) == INTEGER_CST
10205 && tree_int_cst_sgn (arg1) == -1
10206 && negate_expr_p (arg0)
10207 && (tem = negate_expr (arg1)) != arg1
10208 && !TREE_OVERFLOW (tem))
10209 return fold_build2 (MULT_EXPR, type,
10210 negate_expr (arg0), tem);
10211
10212 /* (a * (1 << b)) is (a << b) */
10213 if (TREE_CODE (arg1) == LSHIFT_EXPR
10214 && integer_onep (TREE_OPERAND (arg1, 0)))
10215 return fold_build2 (LSHIFT_EXPR, type, arg0,
10216 TREE_OPERAND (arg1, 1));
10217 if (TREE_CODE (arg0) == LSHIFT_EXPR
10218 && integer_onep (TREE_OPERAND (arg0, 0)))
10219 return fold_build2 (LSHIFT_EXPR, type, arg1,
10220 TREE_OPERAND (arg0, 1));
10221
10222 strict_overflow_p = false;
10223 if (TREE_CODE (arg1) == INTEGER_CST
10224 && 0 != (tem = extract_muldiv (op0,
10225 fold_convert (type, arg1),
10226 code, NULL_TREE,
10227 &strict_overflow_p)))
10228 {
10229 if (strict_overflow_p)
10230 fold_overflow_warning (("assuming signed overflow does not "
10231 "occur when simplifying "
10232 "multiplication"),
10233 WARN_STRICT_OVERFLOW_MISC);
10234 return fold_convert (type, tem);
10235 }
10236
10237 /* Optimize z * conj(z) for integer complex numbers. */
10238 if (TREE_CODE (arg0) == CONJ_EXPR
10239 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10240 return fold_mult_zconjz (type, arg1);
10241 if (TREE_CODE (arg1) == CONJ_EXPR
10242 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10243 return fold_mult_zconjz (type, arg0);
10244 }
10245 else
10246 {
10247 /* Maybe fold x * 0 to 0. The expressions aren't the same
10248 when x is NaN, since x * 0 is also NaN. Nor are they the
10249 same in modes with signed zeros, since multiplying a
10250 negative value by 0 gives -0, not +0. */
10251 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10252 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10253 && real_zerop (arg1))
10254 return omit_one_operand (type, arg1, arg0);
10255 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10256 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10257 && real_onep (arg1))
10258 return non_lvalue (fold_convert (type, arg0));
10259
10260 /* Transform x * -1.0 into -x. */
10261 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10262 && real_minus_onep (arg1))
10263 return fold_convert (type, negate_expr (arg0));
10264
10265 /* Convert (C1/X)*C2 into (C1*C2)/X. */
10266 if (flag_unsafe_math_optimizations
10267 && TREE_CODE (arg0) == RDIV_EXPR
10268 && TREE_CODE (arg1) == REAL_CST
10269 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10270 {
10271 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10272 arg1, 0);
10273 if (tem)
10274 return fold_build2 (RDIV_EXPR, type, tem,
10275 TREE_OPERAND (arg0, 1));
10276 }
10277
10278 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10279 if (operand_equal_p (arg0, arg1, 0))
10280 {
10281 tree tem = fold_strip_sign_ops (arg0);
10282 if (tem != NULL_TREE)
10283 {
10284 tem = fold_convert (type, tem);
10285 return fold_build2 (MULT_EXPR, type, tem, tem);
10286 }
10287 }
10288
10289 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10290 This is not the same for NaNs or if signed zeros are
10291 involved. */
10292 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10293 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10294 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10295 && TREE_CODE (arg1) == COMPLEX_CST
10296 && real_zerop (TREE_REALPART (arg1)))
10297 {
10298 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10299 if (real_onep (TREE_IMAGPART (arg1)))
10300 return fold_build2 (COMPLEX_EXPR, type,
10301 negate_expr (fold_build1 (IMAGPART_EXPR,
10302 rtype, arg0)),
10303 fold_build1 (REALPART_EXPR, rtype, arg0));
10304 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10305 return fold_build2 (COMPLEX_EXPR, type,
10306 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10307 negate_expr (fold_build1 (REALPART_EXPR,
10308 rtype, arg0)));
10309 }
10310
10311 /* Optimize z * conj(z) for floating point complex numbers.
10312 Guarded by flag_unsafe_math_optimizations as non-finite
10313 imaginary components don't produce scalar results. */
10314 if (flag_unsafe_math_optimizations
10315 && TREE_CODE (arg0) == CONJ_EXPR
10316 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10317 return fold_mult_zconjz (type, arg1);
10318 if (flag_unsafe_math_optimizations
10319 && TREE_CODE (arg1) == CONJ_EXPR
10320 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10321 return fold_mult_zconjz (type, arg0);
10322
10323 if (flag_unsafe_math_optimizations)
10324 {
10325 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10326 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10327
10328 /* Optimizations of root(...)*root(...). */
10329 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10330 {
10331 tree rootfn, arg;
10332 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10333 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10334
10335 /* Optimize sqrt(x)*sqrt(x) as x. */
10336 if (BUILTIN_SQRT_P (fcode0)
10337 && operand_equal_p (arg00, arg10, 0)
10338 && ! HONOR_SNANS (TYPE_MODE (type)))
10339 return arg00;
10340
10341 /* Optimize root(x)*root(y) as root(x*y). */
10342 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10343 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10344 return build_call_expr (rootfn, 1, arg);
10345 }
10346
10347 /* Optimize expN(x)*expN(y) as expN(x+y). */
10348 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10349 {
10350 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10351 tree arg = fold_build2 (PLUS_EXPR, type,
10352 CALL_EXPR_ARG (arg0, 0),
10353 CALL_EXPR_ARG (arg1, 0));
10354 return build_call_expr (expfn, 1, arg);
10355 }
10356
10357 /* Optimizations of pow(...)*pow(...). */
10358 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10359 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10360 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10361 {
10362 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10363 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10364 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10365 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10366
10367 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10368 if (operand_equal_p (arg01, arg11, 0))
10369 {
10370 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10371 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10372 return build_call_expr (powfn, 2, arg, arg01);
10373 }
10374
10375 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10376 if (operand_equal_p (arg00, arg10, 0))
10377 {
10378 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10379 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10380 return build_call_expr (powfn, 2, arg00, arg);
10381 }
10382 }
10383
10384 /* Optimize tan(x)*cos(x) as sin(x). */
10385 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10386 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10387 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10388 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10389 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10390 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10391 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10392 CALL_EXPR_ARG (arg1, 0), 0))
10393 {
10394 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10395
10396 if (sinfn != NULL_TREE)
10397 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10398 }
10399
10400 /* Optimize x*pow(x,c) as pow(x,c+1). */
10401 if (fcode1 == BUILT_IN_POW
10402 || fcode1 == BUILT_IN_POWF
10403 || fcode1 == BUILT_IN_POWL)
10404 {
10405 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10406 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10407 if (TREE_CODE (arg11) == REAL_CST
10408 && !TREE_OVERFLOW (arg11)
10409 && operand_equal_p (arg0, arg10, 0))
10410 {
10411 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10412 REAL_VALUE_TYPE c;
10413 tree arg;
10414
10415 c = TREE_REAL_CST (arg11);
10416 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10417 arg = build_real (type, c);
10418 return build_call_expr (powfn, 2, arg0, arg);
10419 }
10420 }
10421
10422 /* Optimize pow(x,c)*x as pow(x,c+1). */
10423 if (fcode0 == BUILT_IN_POW
10424 || fcode0 == BUILT_IN_POWF
10425 || fcode0 == BUILT_IN_POWL)
10426 {
10427 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10428 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10429 if (TREE_CODE (arg01) == REAL_CST
10430 && !TREE_OVERFLOW (arg01)
10431 && operand_equal_p (arg1, arg00, 0))
10432 {
10433 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10434 REAL_VALUE_TYPE c;
10435 tree arg;
10436
10437 c = TREE_REAL_CST (arg01);
10438 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10439 arg = build_real (type, c);
10440 return build_call_expr (powfn, 2, arg1, arg);
10441 }
10442 }
10443
10444 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10445 if (! optimize_size
10446 && operand_equal_p (arg0, arg1, 0))
10447 {
10448 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10449
10450 if (powfn)
10451 {
10452 tree arg = build_real (type, dconst2);
10453 return build_call_expr (powfn, 2, arg0, arg);
10454 }
10455 }
10456 }
10457 }
10458 goto associate;
10459
10460 case BIT_IOR_EXPR:
10461 bit_ior:
10462 if (integer_all_onesp (arg1))
10463 return omit_one_operand (type, arg1, arg0);
10464 if (integer_zerop (arg1))
10465 return non_lvalue (fold_convert (type, arg0));
10466 if (operand_equal_p (arg0, arg1, 0))
10467 return non_lvalue (fold_convert (type, arg0));
10468
10469 /* ~X | X is -1. */
10470 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10471 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10472 {
10473 t1 = build_int_cst_type (type, -1);
10474 return omit_one_operand (type, t1, arg1);
10475 }
10476
10477 /* X | ~X is -1. */
10478 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10479 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10480 {
10481 t1 = build_int_cst_type (type, -1);
10482 return omit_one_operand (type, t1, arg0);
10483 }
10484
10485 /* Canonicalize (X & C1) | C2. */
10486 if (TREE_CODE (arg0) == BIT_AND_EXPR
10487 && TREE_CODE (arg1) == INTEGER_CST
10488 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10489 {
10490 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
10491 int width = TYPE_PRECISION (type);
10492 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10493 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10494 hi2 = TREE_INT_CST_HIGH (arg1);
10495 lo2 = TREE_INT_CST_LOW (arg1);
10496
10497 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10498 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10499 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10500
10501 if (width > HOST_BITS_PER_WIDE_INT)
10502 {
10503 mhi = (unsigned HOST_WIDE_INT) -1
10504 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10505 mlo = -1;
10506 }
10507 else
10508 {
10509 mhi = 0;
10510 mlo = (unsigned HOST_WIDE_INT) -1
10511 >> (HOST_BITS_PER_WIDE_INT - width);
10512 }
10513
10514 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10515 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10516 return fold_build2 (BIT_IOR_EXPR, type,
10517 TREE_OPERAND (arg0, 0), arg1);
10518
10519 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
10520 hi1 &= mhi;
10521 lo1 &= mlo;
10522 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
10523 return fold_build2 (BIT_IOR_EXPR, type,
10524 fold_build2 (BIT_AND_EXPR, type,
10525 TREE_OPERAND (arg0, 0),
10526 build_int_cst_wide (type,
10527 lo1 & ~lo2,
10528 hi1 & ~hi2)),
10529 arg1);
10530 }
10531
10532 /* (X & Y) | Y is (X, Y). */
10533 if (TREE_CODE (arg0) == BIT_AND_EXPR
10534 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10535 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10536 /* (X & Y) | X is (Y, X). */
10537 if (TREE_CODE (arg0) == BIT_AND_EXPR
10538 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10539 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10540 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10541 /* X | (X & Y) is (Y, X). */
10542 if (TREE_CODE (arg1) == BIT_AND_EXPR
10543 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10544 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10545 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10546 /* X | (Y & X) is (Y, X). */
10547 if (TREE_CODE (arg1) == BIT_AND_EXPR
10548 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10549 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10550 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10551
10552 t1 = distribute_bit_expr (code, type, arg0, arg1);
10553 if (t1 != NULL_TREE)
10554 return t1;
10555
10556 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10557
10558 This results in more efficient code for machines without a NAND
10559 instruction. Combine will canonicalize to the first form
10560 which will allow use of NAND instructions provided by the
10561 backend if they exist. */
10562 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10563 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10564 {
10565 return fold_build1 (BIT_NOT_EXPR, type,
10566 build2 (BIT_AND_EXPR, type,
10567 TREE_OPERAND (arg0, 0),
10568 TREE_OPERAND (arg1, 0)));
10569 }
10570
10571 /* See if this can be simplified into a rotate first. If that
10572 is unsuccessful continue in the association code. */
10573 goto bit_rotate;
10574
10575 case BIT_XOR_EXPR:
10576 if (integer_zerop (arg1))
10577 return non_lvalue (fold_convert (type, arg0));
10578 if (integer_all_onesp (arg1))
10579 return fold_build1 (BIT_NOT_EXPR, type, op0);
10580 if (operand_equal_p (arg0, arg1, 0))
10581 return omit_one_operand (type, integer_zero_node, arg0);
10582
10583 /* ~X ^ X is -1. */
10584 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10585 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10586 {
10587 t1 = build_int_cst_type (type, -1);
10588 return omit_one_operand (type, t1, arg1);
10589 }
10590
10591 /* X ^ ~X is -1. */
10592 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10593 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10594 {
10595 t1 = build_int_cst_type (type, -1);
10596 return omit_one_operand (type, t1, arg0);
10597 }
10598
10599 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10600 with a constant, and the two constants have no bits in common,
10601 we should treat this as a BIT_IOR_EXPR since this may produce more
10602 simplifications. */
10603 if (TREE_CODE (arg0) == BIT_AND_EXPR
10604 && TREE_CODE (arg1) == BIT_AND_EXPR
10605 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10606 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10607 && integer_zerop (const_binop (BIT_AND_EXPR,
10608 TREE_OPERAND (arg0, 1),
10609 TREE_OPERAND (arg1, 1), 0)))
10610 {
10611 code = BIT_IOR_EXPR;
10612 goto bit_ior;
10613 }
10614
10615 /* (X | Y) ^ X -> Y & ~ X*/
10616 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10617 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10618 {
10619 tree t2 = TREE_OPERAND (arg0, 1);
10620 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10621 arg1);
10622 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10623 fold_convert (type, t1));
10624 return t1;
10625 }
10626
10627 /* (Y | X) ^ X -> Y & ~ X*/
10628 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10629 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10630 {
10631 tree t2 = TREE_OPERAND (arg0, 0);
10632 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10633 arg1);
10634 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10635 fold_convert (type, t1));
10636 return t1;
10637 }
10638
10639 /* X ^ (X | Y) -> Y & ~ X*/
10640 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10641 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10642 {
10643 tree t2 = TREE_OPERAND (arg1, 1);
10644 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10645 arg0);
10646 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10647 fold_convert (type, t1));
10648 return t1;
10649 }
10650
10651 /* X ^ (Y | X) -> Y & ~ X*/
10652 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10653 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10654 {
10655 tree t2 = TREE_OPERAND (arg1, 0);
10656 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10657 arg0);
10658 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10659 fold_convert (type, t1));
10660 return t1;
10661 }
10662
10663 /* Convert ~X ^ ~Y to X ^ Y. */
10664 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10665 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10666 return fold_build2 (code, type,
10667 fold_convert (type, TREE_OPERAND (arg0, 0)),
10668 fold_convert (type, TREE_OPERAND (arg1, 0)));
10669
10670 /* Convert ~X ^ C to X ^ ~C. */
10671 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10672 && TREE_CODE (arg1) == INTEGER_CST)
10673 return fold_build2 (code, type,
10674 fold_convert (type, TREE_OPERAND (arg0, 0)),
10675 fold_build1 (BIT_NOT_EXPR, type, arg1));
10676
10677 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10678 if (TREE_CODE (arg0) == BIT_AND_EXPR
10679 && integer_onep (TREE_OPERAND (arg0, 1))
10680 && integer_onep (arg1))
10681 return fold_build2 (EQ_EXPR, type, arg0,
10682 build_int_cst (TREE_TYPE (arg0), 0));
10683
10684 /* Fold (X & Y) ^ Y as ~X & Y. */
10685 if (TREE_CODE (arg0) == BIT_AND_EXPR
10686 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10687 {
10688 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10689 return fold_build2 (BIT_AND_EXPR, type,
10690 fold_build1 (BIT_NOT_EXPR, type, tem),
10691 fold_convert (type, arg1));
10692 }
10693 /* Fold (X & Y) ^ X as ~Y & X. */
10694 if (TREE_CODE (arg0) == BIT_AND_EXPR
10695 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10696 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10697 {
10698 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10699 return fold_build2 (BIT_AND_EXPR, type,
10700 fold_build1 (BIT_NOT_EXPR, type, tem),
10701 fold_convert (type, arg1));
10702 }
10703 /* Fold X ^ (X & Y) as X & ~Y. */
10704 if (TREE_CODE (arg1) == BIT_AND_EXPR
10705 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10706 {
10707 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10708 return fold_build2 (BIT_AND_EXPR, type,
10709 fold_convert (type, arg0),
10710 fold_build1 (BIT_NOT_EXPR, type, tem));
10711 }
10712 /* Fold X ^ (Y & X) as ~Y & X. */
10713 if (TREE_CODE (arg1) == BIT_AND_EXPR
10714 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10715 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10716 {
10717 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10718 return fold_build2 (BIT_AND_EXPR, type,
10719 fold_build1 (BIT_NOT_EXPR, type, tem),
10720 fold_convert (type, arg0));
10721 }
10722
10723 /* See if this can be simplified into a rotate first. If that
10724 is unsuccessful continue in the association code. */
10725 goto bit_rotate;
10726
10727 case BIT_AND_EXPR:
10728 if (integer_all_onesp (arg1))
10729 return non_lvalue (fold_convert (type, arg0));
10730 if (integer_zerop (arg1))
10731 return omit_one_operand (type, arg1, arg0);
10732 if (operand_equal_p (arg0, arg1, 0))
10733 return non_lvalue (fold_convert (type, arg0));
10734
10735 /* ~X & X is always zero. */
10736 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10737 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10738 return omit_one_operand (type, integer_zero_node, arg1);
10739
10740 /* X & ~X is always zero. */
10741 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10742 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10743 return omit_one_operand (type, integer_zero_node, arg0);
10744
10745 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10746 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10747 && TREE_CODE (arg1) == INTEGER_CST
10748 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10749 return fold_build2 (BIT_IOR_EXPR, type,
10750 fold_build2 (BIT_AND_EXPR, type,
10751 TREE_OPERAND (arg0, 0), arg1),
10752 fold_build2 (BIT_AND_EXPR, type,
10753 TREE_OPERAND (arg0, 1), arg1));
10754
10755 /* (X | Y) & Y is (X, Y). */
10756 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10757 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10758 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10759 /* (X | Y) & X is (Y, X). */
10760 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10761 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10762 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10763 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10764 /* X & (X | Y) is (Y, X). */
10765 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10766 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10767 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10768 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10769 /* X & (Y | X) is (Y, X). */
10770 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10771 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10772 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10773 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10774
10775 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10776 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10777 && integer_onep (TREE_OPERAND (arg0, 1))
10778 && integer_onep (arg1))
10779 {
10780 tem = TREE_OPERAND (arg0, 0);
10781 return fold_build2 (EQ_EXPR, type,
10782 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10783 build_int_cst (TREE_TYPE (tem), 1)),
10784 build_int_cst (TREE_TYPE (tem), 0));
10785 }
10786 /* Fold ~X & 1 as (X & 1) == 0. */
10787 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10788 && integer_onep (arg1))
10789 {
10790 tem = TREE_OPERAND (arg0, 0);
10791 return fold_build2 (EQ_EXPR, type,
10792 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10793 build_int_cst (TREE_TYPE (tem), 1)),
10794 build_int_cst (TREE_TYPE (tem), 0));
10795 }
10796
10797 /* Fold (X ^ Y) & Y as ~X & Y. */
10798 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10799 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10800 {
10801 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10802 return fold_build2 (BIT_AND_EXPR, type,
10803 fold_build1 (BIT_NOT_EXPR, type, tem),
10804 fold_convert (type, arg1));
10805 }
10806 /* Fold (X ^ Y) & X as ~Y & X. */
10807 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10808 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10809 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10810 {
10811 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10812 return fold_build2 (BIT_AND_EXPR, type,
10813 fold_build1 (BIT_NOT_EXPR, type, tem),
10814 fold_convert (type, arg1));
10815 }
10816 /* Fold X & (X ^ Y) as X & ~Y. */
10817 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10818 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10819 {
10820 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10821 return fold_build2 (BIT_AND_EXPR, type,
10822 fold_convert (type, arg0),
10823 fold_build1 (BIT_NOT_EXPR, type, tem));
10824 }
10825 /* Fold X & (Y ^ X) as ~Y & X. */
10826 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10827 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10828 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10829 {
10830 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10831 return fold_build2 (BIT_AND_EXPR, type,
10832 fold_build1 (BIT_NOT_EXPR, type, tem),
10833 fold_convert (type, arg0));
10834 }
10835
10836 t1 = distribute_bit_expr (code, type, arg0, arg1);
10837 if (t1 != NULL_TREE)
10838 return t1;
10839 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10840 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10841 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10842 {
10843 unsigned int prec
10844 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10845
10846 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10847 && (~TREE_INT_CST_LOW (arg1)
10848 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10849 return fold_convert (type, TREE_OPERAND (arg0, 0));
10850 }
10851
10852 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10853
10854 This results in more efficient code for machines without a NOR
10855 instruction. Combine will canonicalize to the first form
10856 which will allow use of NOR instructions provided by the
10857 backend if they exist. */
10858 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10859 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10860 {
10861 return fold_build1 (BIT_NOT_EXPR, type,
10862 build2 (BIT_IOR_EXPR, type,
10863 TREE_OPERAND (arg0, 0),
10864 TREE_OPERAND (arg1, 0)));
10865 }
10866
10867 goto associate;
10868
10869 case RDIV_EXPR:
10870 /* Don't touch a floating-point divide by zero unless the mode
10871 of the constant can represent infinity. */
10872 if (TREE_CODE (arg1) == REAL_CST
10873 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10874 && real_zerop (arg1))
10875 return NULL_TREE;
10876
10877 /* Optimize A / A to 1.0 if we don't care about
10878 NaNs or Infinities. Skip the transformation
10879 for non-real operands. */
10880 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10881 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10882 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10883 && operand_equal_p (arg0, arg1, 0))
10884 {
10885 tree r = build_real (TREE_TYPE (arg0), dconst1);
10886
10887 return omit_two_operands (type, r, arg0, arg1);
10888 }
10889
10890 /* The complex version of the above A / A optimization. */
10891 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10892 && operand_equal_p (arg0, arg1, 0))
10893 {
10894 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10895 if (! HONOR_NANS (TYPE_MODE (elem_type))
10896 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10897 {
10898 tree r = build_real (elem_type, dconst1);
10899 /* omit_two_operands will call fold_convert for us. */
10900 return omit_two_operands (type, r, arg0, arg1);
10901 }
10902 }
10903
10904 /* (-A) / (-B) -> A / B */
10905 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10906 return fold_build2 (RDIV_EXPR, type,
10907 TREE_OPERAND (arg0, 0),
10908 negate_expr (arg1));
10909 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10910 return fold_build2 (RDIV_EXPR, type,
10911 negate_expr (arg0),
10912 TREE_OPERAND (arg1, 0));
10913
10914 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10915 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10916 && real_onep (arg1))
10917 return non_lvalue (fold_convert (type, arg0));
10918
10919 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10920 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10921 && real_minus_onep (arg1))
10922 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10923
10924 /* If ARG1 is a constant, we can convert this to a multiply by the
10925 reciprocal. This does not have the same rounding properties,
10926 so only do this if -funsafe-math-optimizations. We can actually
10927 always safely do it if ARG1 is a power of two, but it's hard to
10928 tell if it is or not in a portable manner. */
10929 if (TREE_CODE (arg1) == REAL_CST)
10930 {
10931 if (flag_unsafe_math_optimizations
10932 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10933 arg1, 0)))
10934 return fold_build2 (MULT_EXPR, type, arg0, tem);
10935 /* Find the reciprocal if optimizing and the result is exact. */
10936 if (optimize)
10937 {
10938 REAL_VALUE_TYPE r;
10939 r = TREE_REAL_CST (arg1);
10940 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10941 {
10942 tem = build_real (type, r);
10943 return fold_build2 (MULT_EXPR, type,
10944 fold_convert (type, arg0), tem);
10945 }
10946 }
10947 }
10948 /* Convert A/B/C to A/(B*C). */
10949 if (flag_unsafe_math_optimizations
10950 && TREE_CODE (arg0) == RDIV_EXPR)
10951 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10952 fold_build2 (MULT_EXPR, type,
10953 TREE_OPERAND (arg0, 1), arg1));
10954
10955 /* Convert A/(B/C) to (A/B)*C. */
10956 if (flag_unsafe_math_optimizations
10957 && TREE_CODE (arg1) == RDIV_EXPR)
10958 return fold_build2 (MULT_EXPR, type,
10959 fold_build2 (RDIV_EXPR, type, arg0,
10960 TREE_OPERAND (arg1, 0)),
10961 TREE_OPERAND (arg1, 1));
10962
10963 /* Convert C1/(X*C2) into (C1/C2)/X. */
10964 if (flag_unsafe_math_optimizations
10965 && TREE_CODE (arg1) == MULT_EXPR
10966 && TREE_CODE (arg0) == REAL_CST
10967 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10968 {
10969 tree tem = const_binop (RDIV_EXPR, arg0,
10970 TREE_OPERAND (arg1, 1), 0);
10971 if (tem)
10972 return fold_build2 (RDIV_EXPR, type, tem,
10973 TREE_OPERAND (arg1, 0));
10974 }
10975
10976 if (flag_unsafe_math_optimizations)
10977 {
10978 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10979 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10980
10981 /* Optimize sin(x)/cos(x) as tan(x). */
10982 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10983 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10984 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10985 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10986 CALL_EXPR_ARG (arg1, 0), 0))
10987 {
10988 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10989
10990 if (tanfn != NULL_TREE)
10991 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10992 }
10993
10994 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10995 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10996 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10997 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10998 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10999 CALL_EXPR_ARG (arg1, 0), 0))
11000 {
11001 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11002
11003 if (tanfn != NULL_TREE)
11004 {
11005 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11006 return fold_build2 (RDIV_EXPR, type,
11007 build_real (type, dconst1), tmp);
11008 }
11009 }
11010
11011 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11012 NaNs or Infinities. */
11013 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11014 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11015 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11016 {
11017 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11018 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11019
11020 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11021 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11022 && operand_equal_p (arg00, arg01, 0))
11023 {
11024 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11025
11026 if (cosfn != NULL_TREE)
11027 return build_call_expr (cosfn, 1, arg00);
11028 }
11029 }
11030
11031 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11032 NaNs or Infinities. */
11033 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11034 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11035 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11036 {
11037 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11038 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11039
11040 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11041 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11042 && operand_equal_p (arg00, arg01, 0))
11043 {
11044 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11045
11046 if (cosfn != NULL_TREE)
11047 {
11048 tree tmp = build_call_expr (cosfn, 1, arg00);
11049 return fold_build2 (RDIV_EXPR, type,
11050 build_real (type, dconst1),
11051 tmp);
11052 }
11053 }
11054 }
11055
11056 /* Optimize pow(x,c)/x as pow(x,c-1). */
11057 if (fcode0 == BUILT_IN_POW
11058 || fcode0 == BUILT_IN_POWF
11059 || fcode0 == BUILT_IN_POWL)
11060 {
11061 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11062 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11063 if (TREE_CODE (arg01) == REAL_CST
11064 && !TREE_OVERFLOW (arg01)
11065 && operand_equal_p (arg1, arg00, 0))
11066 {
11067 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11068 REAL_VALUE_TYPE c;
11069 tree arg;
11070
11071 c = TREE_REAL_CST (arg01);
11072 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11073 arg = build_real (type, c);
11074 return build_call_expr (powfn, 2, arg1, arg);
11075 }
11076 }
11077
11078 /* Optimize a/root(b/c) into a*root(c/b). */
11079 if (BUILTIN_ROOT_P (fcode1))
11080 {
11081 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11082
11083 if (TREE_CODE (rootarg) == RDIV_EXPR)
11084 {
11085 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11086 tree b = TREE_OPERAND (rootarg, 0);
11087 tree c = TREE_OPERAND (rootarg, 1);
11088
11089 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11090
11091 tmp = build_call_expr (rootfn, 1, tmp);
11092 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11093 }
11094 }
11095
11096 /* Optimize x/expN(y) into x*expN(-y). */
11097 if (BUILTIN_EXPONENT_P (fcode1))
11098 {
11099 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11100 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11101 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11102 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11103 }
11104
11105 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11106 if (fcode1 == BUILT_IN_POW
11107 || fcode1 == BUILT_IN_POWF
11108 || fcode1 == BUILT_IN_POWL)
11109 {
11110 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11111 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11112 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11113 tree neg11 = fold_convert (type, negate_expr (arg11));
11114 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11115 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11116 }
11117 }
11118 return NULL_TREE;
11119
11120 case TRUNC_DIV_EXPR:
11121 case FLOOR_DIV_EXPR:
11122 /* Simplify A / (B << N) where A and B are positive and B is
11123 a power of 2, to A >> (N + log2(B)). */
11124 strict_overflow_p = false;
11125 if (TREE_CODE (arg1) == LSHIFT_EXPR
11126 && (TYPE_UNSIGNED (type)
11127 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
11128 {
11129 tree sval = TREE_OPERAND (arg1, 0);
11130 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11131 {
11132 tree sh_cnt = TREE_OPERAND (arg1, 1);
11133 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11134
11135 if (strict_overflow_p)
11136 fold_overflow_warning (("assuming signed overflow does not "
11137 "occur when simplifying A / (B << N)"),
11138 WARN_STRICT_OVERFLOW_MISC);
11139
11140 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11141 sh_cnt, build_int_cst (NULL_TREE, pow2));
11142 return fold_build2 (RSHIFT_EXPR, type,
11143 fold_convert (type, arg0), sh_cnt);
11144 }
11145 }
11146 /* Fall thru */
11147
11148 case ROUND_DIV_EXPR:
11149 case CEIL_DIV_EXPR:
11150 case EXACT_DIV_EXPR:
11151 if (integer_onep (arg1))
11152 return non_lvalue (fold_convert (type, arg0));
11153 if (integer_zerop (arg1))
11154 return NULL_TREE;
11155 /* X / -1 is -X. */
11156 if (!TYPE_UNSIGNED (type)
11157 && TREE_CODE (arg1) == INTEGER_CST
11158 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11159 && TREE_INT_CST_HIGH (arg1) == -1)
11160 return fold_convert (type, negate_expr (arg0));
11161
11162 /* Convert -A / -B to A / B when the type is signed and overflow is
11163 undefined. */
11164 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11165 && TREE_CODE (arg0) == NEGATE_EXPR
11166 && negate_expr_p (arg1))
11167 {
11168 if (INTEGRAL_TYPE_P (type))
11169 fold_overflow_warning (("assuming signed overflow does not occur "
11170 "when distributing negation across "
11171 "division"),
11172 WARN_STRICT_OVERFLOW_MISC);
11173 return fold_build2 (code, type,
11174 fold_convert (type, TREE_OPERAND (arg0, 0)),
11175 negate_expr (arg1));
11176 }
11177 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11178 && TREE_CODE (arg1) == NEGATE_EXPR
11179 && negate_expr_p (arg0))
11180 {
11181 if (INTEGRAL_TYPE_P (type))
11182 fold_overflow_warning (("assuming signed overflow does not occur "
11183 "when distributing negation across "
11184 "division"),
11185 WARN_STRICT_OVERFLOW_MISC);
11186 return fold_build2 (code, type, negate_expr (arg0),
11187 TREE_OPERAND (arg1, 0));
11188 }
11189
11190 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11191 operation, EXACT_DIV_EXPR.
11192
11193 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11194 At one time others generated faster code, it's not clear if they do
11195 after the last round to changes to the DIV code in expmed.c. */
11196 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11197 && multiple_of_p (type, arg0, arg1))
11198 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11199
11200 strict_overflow_p = false;
11201 if (TREE_CODE (arg1) == INTEGER_CST
11202 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11203 &strict_overflow_p)))
11204 {
11205 if (strict_overflow_p)
11206 fold_overflow_warning (("assuming signed overflow does not occur "
11207 "when simplifying division"),
11208 WARN_STRICT_OVERFLOW_MISC);
11209 return fold_convert (type, tem);
11210 }
11211
11212 return NULL_TREE;
11213
11214 case CEIL_MOD_EXPR:
11215 case FLOOR_MOD_EXPR:
11216 case ROUND_MOD_EXPR:
11217 case TRUNC_MOD_EXPR:
11218 /* X % 1 is always zero, but be sure to preserve any side
11219 effects in X. */
11220 if (integer_onep (arg1))
11221 return omit_one_operand (type, integer_zero_node, arg0);
11222
11223 /* X % 0, return X % 0 unchanged so that we can get the
11224 proper warnings and errors. */
11225 if (integer_zerop (arg1))
11226 return NULL_TREE;
11227
11228 /* 0 % X is always zero, but be sure to preserve any side
11229 effects in X. Place this after checking for X == 0. */
11230 if (integer_zerop (arg0))
11231 return omit_one_operand (type, integer_zero_node, arg1);
11232
11233 /* X % -1 is zero. */
11234 if (!TYPE_UNSIGNED (type)
11235 && TREE_CODE (arg1) == INTEGER_CST
11236 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11237 && TREE_INT_CST_HIGH (arg1) == -1)
11238 return omit_one_operand (type, integer_zero_node, arg0);
11239
11240 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11241 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11242 strict_overflow_p = false;
11243 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11244 && (TYPE_UNSIGNED (type)
11245 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
11246 {
11247 tree c = arg1;
11248 /* Also optimize A % (C << N) where C is a power of 2,
11249 to A & ((C << N) - 1). */
11250 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11251 c = TREE_OPERAND (arg1, 0);
11252
11253 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11254 {
11255 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11256 build_int_cst (TREE_TYPE (arg1), 1));
11257 if (strict_overflow_p)
11258 fold_overflow_warning (("assuming signed overflow does not "
11259 "occur when simplifying "
11260 "X % (power of two)"),
11261 WARN_STRICT_OVERFLOW_MISC);
11262 return fold_build2 (BIT_AND_EXPR, type,
11263 fold_convert (type, arg0),
11264 fold_convert (type, mask));
11265 }
11266 }
11267
11268 /* X % -C is the same as X % C. */
11269 if (code == TRUNC_MOD_EXPR
11270 && !TYPE_UNSIGNED (type)
11271 && TREE_CODE (arg1) == INTEGER_CST
11272 && !TREE_OVERFLOW (arg1)
11273 && TREE_INT_CST_HIGH (arg1) < 0
11274 && !TYPE_OVERFLOW_TRAPS (type)
11275 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11276 && !sign_bit_p (arg1, arg1))
11277 return fold_build2 (code, type, fold_convert (type, arg0),
11278 fold_convert (type, negate_expr (arg1)));
11279
11280 /* X % -Y is the same as X % Y. */
11281 if (code == TRUNC_MOD_EXPR
11282 && !TYPE_UNSIGNED (type)
11283 && TREE_CODE (arg1) == NEGATE_EXPR
11284 && !TYPE_OVERFLOW_TRAPS (type))
11285 return fold_build2 (code, type, fold_convert (type, arg0),
11286 fold_convert (type, TREE_OPERAND (arg1, 0)));
11287
11288 if (TREE_CODE (arg1) == INTEGER_CST
11289 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11290 &strict_overflow_p)))
11291 {
11292 if (strict_overflow_p)
11293 fold_overflow_warning (("assuming signed overflow does not occur "
11294 "when simplifying modulos"),
11295 WARN_STRICT_OVERFLOW_MISC);
11296 return fold_convert (type, tem);
11297 }
11298
11299 return NULL_TREE;
11300
11301 case LROTATE_EXPR:
11302 case RROTATE_EXPR:
11303 if (integer_all_onesp (arg0))
11304 return omit_one_operand (type, arg0, arg1);
11305 goto shift;
11306
11307 case RSHIFT_EXPR:
11308 /* Optimize -1 >> x for arithmetic right shifts. */
11309 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11310 return omit_one_operand (type, arg0, arg1);
11311 /* ... fall through ... */
11312
11313 case LSHIFT_EXPR:
11314 shift:
11315 if (integer_zerop (arg1))
11316 return non_lvalue (fold_convert (type, arg0));
11317 if (integer_zerop (arg0))
11318 return omit_one_operand (type, arg0, arg1);
11319
11320 /* Since negative shift count is not well-defined,
11321 don't try to compute it in the compiler. */
11322 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11323 return NULL_TREE;
11324
11325 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11326 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11327 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11328 && host_integerp (TREE_OPERAND (arg0, 1), false)
11329 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11330 {
11331 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11332 + TREE_INT_CST_LOW (arg1));
11333
11334 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11335 being well defined. */
11336 if (low >= TYPE_PRECISION (type))
11337 {
11338 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11339 low = low % TYPE_PRECISION (type);
11340 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11341 return build_int_cst (type, 0);
11342 else
11343 low = TYPE_PRECISION (type) - 1;
11344 }
11345
11346 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11347 build_int_cst (type, low));
11348 }
11349
11350 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11351 into x & ((unsigned)-1 >> c) for unsigned types. */
11352 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11353 || (TYPE_UNSIGNED (type)
11354 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11355 && host_integerp (arg1, false)
11356 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11357 && host_integerp (TREE_OPERAND (arg0, 1), false)
11358 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11359 {
11360 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11361 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11362 tree lshift;
11363 tree arg00;
11364
11365 if (low0 == low1)
11366 {
11367 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11368
11369 lshift = build_int_cst (type, -1);
11370 lshift = int_const_binop (code, lshift, arg1, 0);
11371
11372 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11373 }
11374 }
11375
11376 /* Rewrite an LROTATE_EXPR by a constant into an
11377 RROTATE_EXPR by a new constant. */
11378 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11379 {
11380 tree tem = build_int_cst (TREE_TYPE (arg1),
11381 GET_MODE_BITSIZE (TYPE_MODE (type)));
11382 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11383 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
11384 }
11385
11386 /* If we have a rotate of a bit operation with the rotate count and
11387 the second operand of the bit operation both constant,
11388 permute the two operations. */
11389 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11390 && (TREE_CODE (arg0) == BIT_AND_EXPR
11391 || TREE_CODE (arg0) == BIT_IOR_EXPR
11392 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11393 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11394 return fold_build2 (TREE_CODE (arg0), type,
11395 fold_build2 (code, type,
11396 TREE_OPERAND (arg0, 0), arg1),
11397 fold_build2 (code, type,
11398 TREE_OPERAND (arg0, 1), arg1));
11399
11400 /* Two consecutive rotates adding up to the width of the mode can
11401 be ignored. */
11402 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11403 && TREE_CODE (arg0) == RROTATE_EXPR
11404 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11405 && TREE_INT_CST_HIGH (arg1) == 0
11406 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11407 && ((TREE_INT_CST_LOW (arg1)
11408 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11409 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
11410 return TREE_OPERAND (arg0, 0);
11411
11412 return NULL_TREE;
11413
11414 case MIN_EXPR:
11415 if (operand_equal_p (arg0, arg1, 0))
11416 return omit_one_operand (type, arg0, arg1);
11417 if (INTEGRAL_TYPE_P (type)
11418 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11419 return omit_one_operand (type, arg1, arg0);
11420 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11421 if (tem)
11422 return tem;
11423 goto associate;
11424
11425 case MAX_EXPR:
11426 if (operand_equal_p (arg0, arg1, 0))
11427 return omit_one_operand (type, arg0, arg1);
11428 if (INTEGRAL_TYPE_P (type)
11429 && TYPE_MAX_VALUE (type)
11430 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11431 return omit_one_operand (type, arg1, arg0);
11432 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11433 if (tem)
11434 return tem;
11435 goto associate;
11436
11437 case TRUTH_ANDIF_EXPR:
11438 /* Note that the operands of this must be ints
11439 and their values must be 0 or 1.
11440 ("true" is a fixed value perhaps depending on the language.) */
11441 /* If first arg is constant zero, return it. */
11442 if (integer_zerop (arg0))
11443 return fold_convert (type, arg0);
11444 case TRUTH_AND_EXPR:
11445 /* If either arg is constant true, drop it. */
11446 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11447 return non_lvalue (fold_convert (type, arg1));
11448 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11449 /* Preserve sequence points. */
11450 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11451 return non_lvalue (fold_convert (type, arg0));
11452 /* If second arg is constant zero, result is zero, but first arg
11453 must be evaluated. */
11454 if (integer_zerop (arg1))
11455 return omit_one_operand (type, arg1, arg0);
11456 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11457 case will be handled here. */
11458 if (integer_zerop (arg0))
11459 return omit_one_operand (type, arg0, arg1);
11460
11461 /* !X && X is always false. */
11462 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11463 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11464 return omit_one_operand (type, integer_zero_node, arg1);
11465 /* X && !X is always false. */
11466 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11467 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11468 return omit_one_operand (type, integer_zero_node, arg0);
11469
11470 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11471 means A >= Y && A != MAX, but in this case we know that
11472 A < X <= MAX. */
11473
11474 if (!TREE_SIDE_EFFECTS (arg0)
11475 && !TREE_SIDE_EFFECTS (arg1))
11476 {
11477 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11478 if (tem && !operand_equal_p (tem, arg0, 0))
11479 return fold_build2 (code, type, tem, arg1);
11480
11481 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11482 if (tem && !operand_equal_p (tem, arg1, 0))
11483 return fold_build2 (code, type, arg0, tem);
11484 }
11485
11486 truth_andor:
11487 /* We only do these simplifications if we are optimizing. */
11488 if (!optimize)
11489 return NULL_TREE;
11490
11491 /* Check for things like (A || B) && (A || C). We can convert this
11492 to A || (B && C). Note that either operator can be any of the four
11493 truth and/or operations and the transformation will still be
11494 valid. Also note that we only care about order for the
11495 ANDIF and ORIF operators. If B contains side effects, this
11496 might change the truth-value of A. */
11497 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11498 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11499 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11500 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11501 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11502 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11503 {
11504 tree a00 = TREE_OPERAND (arg0, 0);
11505 tree a01 = TREE_OPERAND (arg0, 1);
11506 tree a10 = TREE_OPERAND (arg1, 0);
11507 tree a11 = TREE_OPERAND (arg1, 1);
11508 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11509 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11510 && (code == TRUTH_AND_EXPR
11511 || code == TRUTH_OR_EXPR));
11512
11513 if (operand_equal_p (a00, a10, 0))
11514 return fold_build2 (TREE_CODE (arg0), type, a00,
11515 fold_build2 (code, type, a01, a11));
11516 else if (commutative && operand_equal_p (a00, a11, 0))
11517 return fold_build2 (TREE_CODE (arg0), type, a00,
11518 fold_build2 (code, type, a01, a10));
11519 else if (commutative && operand_equal_p (a01, a10, 0))
11520 return fold_build2 (TREE_CODE (arg0), type, a01,
11521 fold_build2 (code, type, a00, a11));
11522
11523 /* This case if tricky because we must either have commutative
11524 operators or else A10 must not have side-effects. */
11525
11526 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11527 && operand_equal_p (a01, a11, 0))
11528 return fold_build2 (TREE_CODE (arg0), type,
11529 fold_build2 (code, type, a00, a10),
11530 a01);
11531 }
11532
11533 /* See if we can build a range comparison. */
11534 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11535 return tem;
11536
11537 /* Check for the possibility of merging component references. If our
11538 lhs is another similar operation, try to merge its rhs with our
11539 rhs. Then try to merge our lhs and rhs. */
11540 if (TREE_CODE (arg0) == code
11541 && 0 != (tem = fold_truthop (code, type,
11542 TREE_OPERAND (arg0, 1), arg1)))
11543 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11544
11545 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11546 return tem;
11547
11548 return NULL_TREE;
11549
11550 case TRUTH_ORIF_EXPR:
11551 /* Note that the operands of this must be ints
11552 and their values must be 0 or true.
11553 ("true" is a fixed value perhaps depending on the language.) */
11554 /* If first arg is constant true, return it. */
11555 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11556 return fold_convert (type, arg0);
11557 case TRUTH_OR_EXPR:
11558 /* If either arg is constant zero, drop it. */
11559 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11560 return non_lvalue (fold_convert (type, arg1));
11561 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11562 /* Preserve sequence points. */
11563 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11564 return non_lvalue (fold_convert (type, arg0));
11565 /* If second arg is constant true, result is true, but we must
11566 evaluate first arg. */
11567 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11568 return omit_one_operand (type, arg1, arg0);
11569 /* Likewise for first arg, but note this only occurs here for
11570 TRUTH_OR_EXPR. */
11571 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11572 return omit_one_operand (type, arg0, arg1);
11573
11574 /* !X || X is always true. */
11575 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11576 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11577 return omit_one_operand (type, integer_one_node, arg1);
11578 /* X || !X is always true. */
11579 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11580 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11581 return omit_one_operand (type, integer_one_node, arg0);
11582
11583 goto truth_andor;
11584
11585 case TRUTH_XOR_EXPR:
11586 /* If the second arg is constant zero, drop it. */
11587 if (integer_zerop (arg1))
11588 return non_lvalue (fold_convert (type, arg0));
11589 /* If the second arg is constant true, this is a logical inversion. */
11590 if (integer_onep (arg1))
11591 {
11592 /* Only call invert_truthvalue if operand is a truth value. */
11593 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11594 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11595 else
11596 tem = invert_truthvalue (arg0);
11597 return non_lvalue (fold_convert (type, tem));
11598 }
11599 /* Identical arguments cancel to zero. */
11600 if (operand_equal_p (arg0, arg1, 0))
11601 return omit_one_operand (type, integer_zero_node, arg0);
11602
11603 /* !X ^ X is always true. */
11604 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11605 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11606 return omit_one_operand (type, integer_one_node, arg1);
11607
11608 /* X ^ !X is always true. */
11609 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11610 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11611 return omit_one_operand (type, integer_one_node, arg0);
11612
11613 return NULL_TREE;
11614
11615 case EQ_EXPR:
11616 case NE_EXPR:
11617 tem = fold_comparison (code, type, op0, op1);
11618 if (tem != NULL_TREE)
11619 return tem;
11620
11621 /* bool_var != 0 becomes bool_var. */
11622 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11623 && code == NE_EXPR)
11624 return non_lvalue (fold_convert (type, arg0));
11625
11626 /* bool_var == 1 becomes bool_var. */
11627 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11628 && code == EQ_EXPR)
11629 return non_lvalue (fold_convert (type, arg0));
11630
11631 /* bool_var != 1 becomes !bool_var. */
11632 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11633 && code == NE_EXPR)
11634 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11635
11636 /* bool_var == 0 becomes !bool_var. */
11637 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11638 && code == EQ_EXPR)
11639 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11640
11641 /* If this is an equality comparison of the address of two non-weak,
11642 unaliased symbols neither of which are extern (since we do not
11643 have access to attributes for externs), then we know the result. */
11644 if (TREE_CODE (arg0) == ADDR_EXPR
11645 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11646 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11647 && ! lookup_attribute ("alias",
11648 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11649 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11650 && TREE_CODE (arg1) == ADDR_EXPR
11651 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11652 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11653 && ! lookup_attribute ("alias",
11654 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11655 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11656 {
11657 /* We know that we're looking at the address of two
11658 non-weak, unaliased, static _DECL nodes.
11659
11660 It is both wasteful and incorrect to call operand_equal_p
11661 to compare the two ADDR_EXPR nodes. It is wasteful in that
11662 all we need to do is test pointer equality for the arguments
11663 to the two ADDR_EXPR nodes. It is incorrect to use
11664 operand_equal_p as that function is NOT equivalent to a
11665 C equality test. It can in fact return false for two
11666 objects which would test as equal using the C equality
11667 operator. */
11668 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11669 return constant_boolean_node (equal
11670 ? code == EQ_EXPR : code != EQ_EXPR,
11671 type);
11672 }
11673
11674 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11675 a MINUS_EXPR of a constant, we can convert it into a comparison with
11676 a revised constant as long as no overflow occurs. */
11677 if (TREE_CODE (arg1) == INTEGER_CST
11678 && (TREE_CODE (arg0) == PLUS_EXPR
11679 || TREE_CODE (arg0) == MINUS_EXPR)
11680 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11681 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11682 ? MINUS_EXPR : PLUS_EXPR,
11683 fold_convert (TREE_TYPE (arg0), arg1),
11684 TREE_OPERAND (arg0, 1), 0))
11685 && !TREE_OVERFLOW (tem))
11686 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11687
11688 /* Similarly for a NEGATE_EXPR. */
11689 if (TREE_CODE (arg0) == NEGATE_EXPR
11690 && TREE_CODE (arg1) == INTEGER_CST
11691 && 0 != (tem = negate_expr (arg1))
11692 && TREE_CODE (tem) == INTEGER_CST
11693 && !TREE_OVERFLOW (tem))
11694 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11695
11696 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11697 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11698 && TREE_CODE (arg1) == INTEGER_CST
11699 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11700 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11701 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11702 fold_convert (TREE_TYPE (arg0), arg1),
11703 TREE_OPERAND (arg0, 1)));
11704
11705 /* Transform comparisons of the form X +- C CMP X. */
11706 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11707 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11708 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11709 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11710 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11711 {
11712 tree cst = TREE_OPERAND (arg0, 1);
11713
11714 if (code == EQ_EXPR
11715 && !integer_zerop (cst))
11716 return omit_two_operands (type, boolean_false_node,
11717 TREE_OPERAND (arg0, 0), arg1);
11718 else
11719 return omit_two_operands (type, boolean_true_node,
11720 TREE_OPERAND (arg0, 0), arg1);
11721 }
11722
11723 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11724 for !=. Don't do this for ordered comparisons due to overflow. */
11725 if (TREE_CODE (arg0) == MINUS_EXPR
11726 && integer_zerop (arg1))
11727 return fold_build2 (code, type,
11728 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11729
11730 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11731 if (TREE_CODE (arg0) == ABS_EXPR
11732 && (integer_zerop (arg1) || real_zerop (arg1)))
11733 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11734
11735 /* If this is an EQ or NE comparison with zero and ARG0 is
11736 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11737 two operations, but the latter can be done in one less insn
11738 on machines that have only two-operand insns or on which a
11739 constant cannot be the first operand. */
11740 if (TREE_CODE (arg0) == BIT_AND_EXPR
11741 && integer_zerop (arg1))
11742 {
11743 tree arg00 = TREE_OPERAND (arg0, 0);
11744 tree arg01 = TREE_OPERAND (arg0, 1);
11745 if (TREE_CODE (arg00) == LSHIFT_EXPR
11746 && integer_onep (TREE_OPERAND (arg00, 0)))
11747 return
11748 fold_build2 (code, type,
11749 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11750 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11751 arg01, TREE_OPERAND (arg00, 1)),
11752 fold_convert (TREE_TYPE (arg0),
11753 integer_one_node)),
11754 arg1);
11755 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11756 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11757 return
11758 fold_build2 (code, type,
11759 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11760 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11761 arg00, TREE_OPERAND (arg01, 1)),
11762 fold_convert (TREE_TYPE (arg0),
11763 integer_one_node)),
11764 arg1);
11765 }
11766
11767 /* If this is an NE or EQ comparison of zero against the result of a
11768 signed MOD operation whose second operand is a power of 2, make
11769 the MOD operation unsigned since it is simpler and equivalent. */
11770 if (integer_zerop (arg1)
11771 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11772 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11773 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11774 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11775 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11776 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11777 {
11778 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11779 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11780 fold_convert (newtype,
11781 TREE_OPERAND (arg0, 0)),
11782 fold_convert (newtype,
11783 TREE_OPERAND (arg0, 1)));
11784
11785 return fold_build2 (code, type, newmod,
11786 fold_convert (newtype, arg1));
11787 }
11788
11789 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11790 C1 is a valid shift constant, and C2 is a power of two, i.e.
11791 a single bit. */
11792 if (TREE_CODE (arg0) == BIT_AND_EXPR
11793 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11794 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11795 == INTEGER_CST
11796 && integer_pow2p (TREE_OPERAND (arg0, 1))
11797 && integer_zerop (arg1))
11798 {
11799 tree itype = TREE_TYPE (arg0);
11800 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11801 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11802
11803 /* Check for a valid shift count. */
11804 if (TREE_INT_CST_HIGH (arg001) == 0
11805 && TREE_INT_CST_LOW (arg001) < prec)
11806 {
11807 tree arg01 = TREE_OPERAND (arg0, 1);
11808 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11809 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11810 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11811 can be rewritten as (X & (C2 << C1)) != 0. */
11812 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11813 {
11814 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11815 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11816 return fold_build2 (code, type, tem, arg1);
11817 }
11818 /* Otherwise, for signed (arithmetic) shifts,
11819 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11820 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11821 else if (!TYPE_UNSIGNED (itype))
11822 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11823 arg000, build_int_cst (itype, 0));
11824 /* Otherwise, of unsigned (logical) shifts,
11825 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11826 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11827 else
11828 return omit_one_operand (type,
11829 code == EQ_EXPR ? integer_one_node
11830 : integer_zero_node,
11831 arg000);
11832 }
11833 }
11834
11835 /* If this is an NE comparison of zero with an AND of one, remove the
11836 comparison since the AND will give the correct value. */
11837 if (code == NE_EXPR
11838 && integer_zerop (arg1)
11839 && TREE_CODE (arg0) == BIT_AND_EXPR
11840 && integer_onep (TREE_OPERAND (arg0, 1)))
11841 return fold_convert (type, arg0);
11842
11843 /* If we have (A & C) == C where C is a power of 2, convert this into
11844 (A & C) != 0. Similarly for NE_EXPR. */
11845 if (TREE_CODE (arg0) == BIT_AND_EXPR
11846 && integer_pow2p (TREE_OPERAND (arg0, 1))
11847 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11848 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11849 arg0, fold_convert (TREE_TYPE (arg0),
11850 integer_zero_node));
11851
11852 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11853 bit, then fold the expression into A < 0 or A >= 0. */
11854 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11855 if (tem)
11856 return tem;
11857
11858 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11859 Similarly for NE_EXPR. */
11860 if (TREE_CODE (arg0) == BIT_AND_EXPR
11861 && TREE_CODE (arg1) == INTEGER_CST
11862 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11863 {
11864 tree notc = fold_build1 (BIT_NOT_EXPR,
11865 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11866 TREE_OPERAND (arg0, 1));
11867 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11868 arg1, notc);
11869 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11870 if (integer_nonzerop (dandnotc))
11871 return omit_one_operand (type, rslt, arg0);
11872 }
11873
11874 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11875 Similarly for NE_EXPR. */
11876 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11877 && TREE_CODE (arg1) == INTEGER_CST
11878 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11879 {
11880 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11881 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11882 TREE_OPERAND (arg0, 1), notd);
11883 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11884 if (integer_nonzerop (candnotd))
11885 return omit_one_operand (type, rslt, arg0);
11886 }
11887
11888 /* If this is a comparison of a field, we may be able to simplify it. */
11889 if ((TREE_CODE (arg0) == COMPONENT_REF
11890 || TREE_CODE (arg0) == BIT_FIELD_REF)
11891 /* Handle the constant case even without -O
11892 to make sure the warnings are given. */
11893 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11894 {
11895 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11896 if (t1)
11897 return t1;
11898 }
11899
11900 /* Optimize comparisons of strlen vs zero to a compare of the
11901 first character of the string vs zero. To wit,
11902 strlen(ptr) == 0 => *ptr == 0
11903 strlen(ptr) != 0 => *ptr != 0
11904 Other cases should reduce to one of these two (or a constant)
11905 due to the return value of strlen being unsigned. */
11906 if (TREE_CODE (arg0) == CALL_EXPR
11907 && integer_zerop (arg1))
11908 {
11909 tree fndecl = get_callee_fndecl (arg0);
11910
11911 if (fndecl
11912 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11913 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11914 && call_expr_nargs (arg0) == 1
11915 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11916 {
11917 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11918 return fold_build2 (code, type, iref,
11919 build_int_cst (TREE_TYPE (iref), 0));
11920 }
11921 }
11922
11923 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11924 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11925 if (TREE_CODE (arg0) == RSHIFT_EXPR
11926 && integer_zerop (arg1)
11927 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11928 {
11929 tree arg00 = TREE_OPERAND (arg0, 0);
11930 tree arg01 = TREE_OPERAND (arg0, 1);
11931 tree itype = TREE_TYPE (arg00);
11932 if (TREE_INT_CST_HIGH (arg01) == 0
11933 && TREE_INT_CST_LOW (arg01)
11934 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11935 {
11936 if (TYPE_UNSIGNED (itype))
11937 {
11938 itype = signed_type_for (itype);
11939 arg00 = fold_convert (itype, arg00);
11940 }
11941 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11942 type, arg00, build_int_cst (itype, 0));
11943 }
11944 }
11945
11946 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11947 if (integer_zerop (arg1)
11948 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11949 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11950 TREE_OPERAND (arg0, 1));
11951
11952 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11953 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11954 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11955 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11956 build_int_cst (TREE_TYPE (arg1), 0));
11957 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11958 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11959 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11960 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11961 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11962 build_int_cst (TREE_TYPE (arg1), 0));
11963
11964 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11965 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11966 && TREE_CODE (arg1) == INTEGER_CST
11967 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11968 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11969 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11970 TREE_OPERAND (arg0, 1), arg1));
11971
11972 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11973 (X & C) == 0 when C is a single bit. */
11974 if (TREE_CODE (arg0) == BIT_AND_EXPR
11975 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11976 && integer_zerop (arg1)
11977 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11978 {
11979 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11980 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11981 TREE_OPERAND (arg0, 1));
11982 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11983 type, tem, arg1);
11984 }
11985
11986 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11987 constant C is a power of two, i.e. a single bit. */
11988 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11989 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11990 && integer_zerop (arg1)
11991 && integer_pow2p (TREE_OPERAND (arg0, 1))
11992 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11993 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11994 {
11995 tree arg00 = TREE_OPERAND (arg0, 0);
11996 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11997 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11998 }
11999
12000 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12001 when is C is a power of two, i.e. a single bit. */
12002 if (TREE_CODE (arg0) == BIT_AND_EXPR
12003 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12004 && integer_zerop (arg1)
12005 && integer_pow2p (TREE_OPERAND (arg0, 1))
12006 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12007 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12008 {
12009 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12010 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12011 arg000, TREE_OPERAND (arg0, 1));
12012 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12013 tem, build_int_cst (TREE_TYPE (tem), 0));
12014 }
12015
12016 if (integer_zerop (arg1)
12017 && tree_expr_nonzero_p (arg0))
12018 {
12019 tree res = constant_boolean_node (code==NE_EXPR, type);
12020 return omit_one_operand (type, res, arg0);
12021 }
12022
12023 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12024 if (TREE_CODE (arg0) == NEGATE_EXPR
12025 && TREE_CODE (arg1) == NEGATE_EXPR)
12026 return fold_build2 (code, type,
12027 TREE_OPERAND (arg0, 0),
12028 TREE_OPERAND (arg1, 0));
12029
12030 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12031 if (TREE_CODE (arg0) == BIT_AND_EXPR
12032 && TREE_CODE (arg1) == BIT_AND_EXPR)
12033 {
12034 tree arg00 = TREE_OPERAND (arg0, 0);
12035 tree arg01 = TREE_OPERAND (arg0, 1);
12036 tree arg10 = TREE_OPERAND (arg1, 0);
12037 tree arg11 = TREE_OPERAND (arg1, 1);
12038 tree itype = TREE_TYPE (arg0);
12039
12040 if (operand_equal_p (arg01, arg11, 0))
12041 return fold_build2 (code, type,
12042 fold_build2 (BIT_AND_EXPR, itype,
12043 fold_build2 (BIT_XOR_EXPR, itype,
12044 arg00, arg10),
12045 arg01),
12046 build_int_cst (itype, 0));
12047
12048 if (operand_equal_p (arg01, arg10, 0))
12049 return fold_build2 (code, type,
12050 fold_build2 (BIT_AND_EXPR, itype,
12051 fold_build2 (BIT_XOR_EXPR, itype,
12052 arg00, arg11),
12053 arg01),
12054 build_int_cst (itype, 0));
12055
12056 if (operand_equal_p (arg00, arg11, 0))
12057 return fold_build2 (code, type,
12058 fold_build2 (BIT_AND_EXPR, itype,
12059 fold_build2 (BIT_XOR_EXPR, itype,
12060 arg01, arg10),
12061 arg00),
12062 build_int_cst (itype, 0));
12063
12064 if (operand_equal_p (arg00, arg10, 0))
12065 return fold_build2 (code, type,
12066 fold_build2 (BIT_AND_EXPR, itype,
12067 fold_build2 (BIT_XOR_EXPR, itype,
12068 arg01, arg11),
12069 arg00),
12070 build_int_cst (itype, 0));
12071 }
12072
12073 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12074 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12075 {
12076 tree arg00 = TREE_OPERAND (arg0, 0);
12077 tree arg01 = TREE_OPERAND (arg0, 1);
12078 tree arg10 = TREE_OPERAND (arg1, 0);
12079 tree arg11 = TREE_OPERAND (arg1, 1);
12080 tree itype = TREE_TYPE (arg0);
12081
12082 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12083 operand_equal_p guarantees no side-effects so we don't need
12084 to use omit_one_operand on Z. */
12085 if (operand_equal_p (arg01, arg11, 0))
12086 return fold_build2 (code, type, arg00, arg10);
12087 if (operand_equal_p (arg01, arg10, 0))
12088 return fold_build2 (code, type, arg00, arg11);
12089 if (operand_equal_p (arg00, arg11, 0))
12090 return fold_build2 (code, type, arg01, arg10);
12091 if (operand_equal_p (arg00, arg10, 0))
12092 return fold_build2 (code, type, arg01, arg11);
12093
12094 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12095 if (TREE_CODE (arg01) == INTEGER_CST
12096 && TREE_CODE (arg11) == INTEGER_CST)
12097 return fold_build2 (code, type,
12098 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12099 fold_build2 (BIT_XOR_EXPR, itype,
12100 arg01, arg11)),
12101 arg10);
12102 }
12103
12104 /* Attempt to simplify equality/inequality comparisons of complex
12105 values. Only lower the comparison if the result is known or
12106 can be simplified to a single scalar comparison. */
12107 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12108 || TREE_CODE (arg0) == COMPLEX_CST)
12109 && (TREE_CODE (arg1) == COMPLEX_EXPR
12110 || TREE_CODE (arg1) == COMPLEX_CST))
12111 {
12112 tree real0, imag0, real1, imag1;
12113 tree rcond, icond;
12114
12115 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12116 {
12117 real0 = TREE_OPERAND (arg0, 0);
12118 imag0 = TREE_OPERAND (arg0, 1);
12119 }
12120 else
12121 {
12122 real0 = TREE_REALPART (arg0);
12123 imag0 = TREE_IMAGPART (arg0);
12124 }
12125
12126 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12127 {
12128 real1 = TREE_OPERAND (arg1, 0);
12129 imag1 = TREE_OPERAND (arg1, 1);
12130 }
12131 else
12132 {
12133 real1 = TREE_REALPART (arg1);
12134 imag1 = TREE_IMAGPART (arg1);
12135 }
12136
12137 rcond = fold_binary (code, type, real0, real1);
12138 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12139 {
12140 if (integer_zerop (rcond))
12141 {
12142 if (code == EQ_EXPR)
12143 return omit_two_operands (type, boolean_false_node,
12144 imag0, imag1);
12145 return fold_build2 (NE_EXPR, type, imag0, imag1);
12146 }
12147 else
12148 {
12149 if (code == NE_EXPR)
12150 return omit_two_operands (type, boolean_true_node,
12151 imag0, imag1);
12152 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12153 }
12154 }
12155
12156 icond = fold_binary (code, type, imag0, imag1);
12157 if (icond && TREE_CODE (icond) == INTEGER_CST)
12158 {
12159 if (integer_zerop (icond))
12160 {
12161 if (code == EQ_EXPR)
12162 return omit_two_operands (type, boolean_false_node,
12163 real0, real1);
12164 return fold_build2 (NE_EXPR, type, real0, real1);
12165 }
12166 else
12167 {
12168 if (code == NE_EXPR)
12169 return omit_two_operands (type, boolean_true_node,
12170 real0, real1);
12171 return fold_build2 (EQ_EXPR, type, real0, real1);
12172 }
12173 }
12174 }
12175
12176 return NULL_TREE;
12177
12178 case LT_EXPR:
12179 case GT_EXPR:
12180 case LE_EXPR:
12181 case GE_EXPR:
12182 tem = fold_comparison (code, type, op0, op1);
12183 if (tem != NULL_TREE)
12184 return tem;
12185
12186 /* Transform comparisons of the form X +- C CMP X. */
12187 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12188 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12189 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12190 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12191 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12192 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12193 {
12194 tree arg01 = TREE_OPERAND (arg0, 1);
12195 enum tree_code code0 = TREE_CODE (arg0);
12196 int is_positive;
12197
12198 if (TREE_CODE (arg01) == REAL_CST)
12199 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12200 else
12201 is_positive = tree_int_cst_sgn (arg01);
12202
12203 /* (X - c) > X becomes false. */
12204 if (code == GT_EXPR
12205 && ((code0 == MINUS_EXPR && is_positive >= 0)
12206 || (code0 == PLUS_EXPR && is_positive <= 0)))
12207 {
12208 if (TREE_CODE (arg01) == INTEGER_CST
12209 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12210 fold_overflow_warning (("assuming signed overflow does not "
12211 "occur when assuming that (X - c) > X "
12212 "is always false"),
12213 WARN_STRICT_OVERFLOW_ALL);
12214 return constant_boolean_node (0, type);
12215 }
12216
12217 /* Likewise (X + c) < X becomes false. */
12218 if (code == LT_EXPR
12219 && ((code0 == PLUS_EXPR && is_positive >= 0)
12220 || (code0 == MINUS_EXPR && is_positive <= 0)))
12221 {
12222 if (TREE_CODE (arg01) == INTEGER_CST
12223 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12224 fold_overflow_warning (("assuming signed overflow does not "
12225 "occur when assuming that "
12226 "(X + c) < X is always false"),
12227 WARN_STRICT_OVERFLOW_ALL);
12228 return constant_boolean_node (0, type);
12229 }
12230
12231 /* Convert (X - c) <= X to true. */
12232 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12233 && code == LE_EXPR
12234 && ((code0 == MINUS_EXPR && is_positive >= 0)
12235 || (code0 == PLUS_EXPR && is_positive <= 0)))
12236 {
12237 if (TREE_CODE (arg01) == INTEGER_CST
12238 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12239 fold_overflow_warning (("assuming signed overflow does not "
12240 "occur when assuming that "
12241 "(X - c) <= X is always true"),
12242 WARN_STRICT_OVERFLOW_ALL);
12243 return constant_boolean_node (1, type);
12244 }
12245
12246 /* Convert (X + c) >= X to true. */
12247 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12248 && code == GE_EXPR
12249 && ((code0 == PLUS_EXPR && is_positive >= 0)
12250 || (code0 == MINUS_EXPR && is_positive <= 0)))
12251 {
12252 if (TREE_CODE (arg01) == INTEGER_CST
12253 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12254 fold_overflow_warning (("assuming signed overflow does not "
12255 "occur when assuming that "
12256 "(X + c) >= X is always true"),
12257 WARN_STRICT_OVERFLOW_ALL);
12258 return constant_boolean_node (1, type);
12259 }
12260
12261 if (TREE_CODE (arg01) == INTEGER_CST)
12262 {
12263 /* Convert X + c > X and X - c < X to true for integers. */
12264 if (code == GT_EXPR
12265 && ((code0 == PLUS_EXPR && is_positive > 0)
12266 || (code0 == MINUS_EXPR && is_positive < 0)))
12267 {
12268 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12269 fold_overflow_warning (("assuming signed overflow does "
12270 "not occur when assuming that "
12271 "(X + c) > X is always true"),
12272 WARN_STRICT_OVERFLOW_ALL);
12273 return constant_boolean_node (1, type);
12274 }
12275
12276 if (code == LT_EXPR
12277 && ((code0 == MINUS_EXPR && is_positive > 0)
12278 || (code0 == PLUS_EXPR && is_positive < 0)))
12279 {
12280 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12281 fold_overflow_warning (("assuming signed overflow does "
12282 "not occur when assuming that "
12283 "(X - c) < X is always true"),
12284 WARN_STRICT_OVERFLOW_ALL);
12285 return constant_boolean_node (1, type);
12286 }
12287
12288 /* Convert X + c <= X and X - c >= X to false for integers. */
12289 if (code == LE_EXPR
12290 && ((code0 == PLUS_EXPR && is_positive > 0)
12291 || (code0 == MINUS_EXPR && is_positive < 0)))
12292 {
12293 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12294 fold_overflow_warning (("assuming signed overflow does "
12295 "not occur when assuming that "
12296 "(X + c) <= X is always false"),
12297 WARN_STRICT_OVERFLOW_ALL);
12298 return constant_boolean_node (0, type);
12299 }
12300
12301 if (code == GE_EXPR
12302 && ((code0 == MINUS_EXPR && is_positive > 0)
12303 || (code0 == PLUS_EXPR && is_positive < 0)))
12304 {
12305 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12306 fold_overflow_warning (("assuming signed overflow does "
12307 "not occur when assuming that "
12308 "(X - c) >= X is always false"),
12309 WARN_STRICT_OVERFLOW_ALL);
12310 return constant_boolean_node (0, type);
12311 }
12312 }
12313 }
12314
12315 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12316 This transformation affects the cases which are handled in later
12317 optimizations involving comparisons with non-negative constants. */
12318 if (TREE_CODE (arg1) == INTEGER_CST
12319 && TREE_CODE (arg0) != INTEGER_CST
12320 && tree_int_cst_sgn (arg1) > 0)
12321 {
12322 if (code == GE_EXPR)
12323 {
12324 arg1 = const_binop (MINUS_EXPR, arg1,
12325 build_int_cst (TREE_TYPE (arg1), 1), 0);
12326 return fold_build2 (GT_EXPR, type, arg0,
12327 fold_convert (TREE_TYPE (arg0), arg1));
12328 }
12329 if (code == LT_EXPR)
12330 {
12331 arg1 = const_binop (MINUS_EXPR, arg1,
12332 build_int_cst (TREE_TYPE (arg1), 1), 0);
12333 return fold_build2 (LE_EXPR, type, arg0,
12334 fold_convert (TREE_TYPE (arg0), arg1));
12335 }
12336 }
12337
12338 /* Comparisons with the highest or lowest possible integer of
12339 the specified precision will have known values. */
12340 {
12341 tree arg1_type = TREE_TYPE (arg1);
12342 unsigned int width = TYPE_PRECISION (arg1_type);
12343
12344 if (TREE_CODE (arg1) == INTEGER_CST
12345 && !TREE_OVERFLOW (arg1)
12346 && width <= 2 * HOST_BITS_PER_WIDE_INT
12347 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12348 {
12349 HOST_WIDE_INT signed_max_hi;
12350 unsigned HOST_WIDE_INT signed_max_lo;
12351 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12352
12353 if (width <= HOST_BITS_PER_WIDE_INT)
12354 {
12355 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12356 - 1;
12357 signed_max_hi = 0;
12358 max_hi = 0;
12359
12360 if (TYPE_UNSIGNED (arg1_type))
12361 {
12362 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12363 min_lo = 0;
12364 min_hi = 0;
12365 }
12366 else
12367 {
12368 max_lo = signed_max_lo;
12369 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12370 min_hi = -1;
12371 }
12372 }
12373 else
12374 {
12375 width -= HOST_BITS_PER_WIDE_INT;
12376 signed_max_lo = -1;
12377 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12378 - 1;
12379 max_lo = -1;
12380 min_lo = 0;
12381
12382 if (TYPE_UNSIGNED (arg1_type))
12383 {
12384 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12385 min_hi = 0;
12386 }
12387 else
12388 {
12389 max_hi = signed_max_hi;
12390 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12391 }
12392 }
12393
12394 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12395 && TREE_INT_CST_LOW (arg1) == max_lo)
12396 switch (code)
12397 {
12398 case GT_EXPR:
12399 return omit_one_operand (type, integer_zero_node, arg0);
12400
12401 case GE_EXPR:
12402 return fold_build2 (EQ_EXPR, type, op0, op1);
12403
12404 case LE_EXPR:
12405 return omit_one_operand (type, integer_one_node, arg0);
12406
12407 case LT_EXPR:
12408 return fold_build2 (NE_EXPR, type, op0, op1);
12409
12410 /* The GE_EXPR and LT_EXPR cases above are not normally
12411 reached because of previous transformations. */
12412
12413 default:
12414 break;
12415 }
12416 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12417 == max_hi
12418 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12419 switch (code)
12420 {
12421 case GT_EXPR:
12422 arg1 = const_binop (PLUS_EXPR, arg1,
12423 build_int_cst (TREE_TYPE (arg1), 1), 0);
12424 return fold_build2 (EQ_EXPR, type,
12425 fold_convert (TREE_TYPE (arg1), arg0),
12426 arg1);
12427 case LE_EXPR:
12428 arg1 = const_binop (PLUS_EXPR, arg1,
12429 build_int_cst (TREE_TYPE (arg1), 1), 0);
12430 return fold_build2 (NE_EXPR, type,
12431 fold_convert (TREE_TYPE (arg1), arg0),
12432 arg1);
12433 default:
12434 break;
12435 }
12436 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12437 == min_hi
12438 && TREE_INT_CST_LOW (arg1) == min_lo)
12439 switch (code)
12440 {
12441 case LT_EXPR:
12442 return omit_one_operand (type, integer_zero_node, arg0);
12443
12444 case LE_EXPR:
12445 return fold_build2 (EQ_EXPR, type, op0, op1);
12446
12447 case GE_EXPR:
12448 return omit_one_operand (type, integer_one_node, arg0);
12449
12450 case GT_EXPR:
12451 return fold_build2 (NE_EXPR, type, op0, op1);
12452
12453 default:
12454 break;
12455 }
12456 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12457 == min_hi
12458 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12459 switch (code)
12460 {
12461 case GE_EXPR:
12462 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12463 return fold_build2 (NE_EXPR, type,
12464 fold_convert (TREE_TYPE (arg1), arg0),
12465 arg1);
12466 case LT_EXPR:
12467 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12468 return fold_build2 (EQ_EXPR, type,
12469 fold_convert (TREE_TYPE (arg1), arg0),
12470 arg1);
12471 default:
12472 break;
12473 }
12474
12475 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12476 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12477 && TYPE_UNSIGNED (arg1_type)
12478 /* We will flip the signedness of the comparison operator
12479 associated with the mode of arg1, so the sign bit is
12480 specified by this mode. Check that arg1 is the signed
12481 max associated with this sign bit. */
12482 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12483 /* signed_type does not work on pointer types. */
12484 && INTEGRAL_TYPE_P (arg1_type))
12485 {
12486 /* The following case also applies to X < signed_max+1
12487 and X >= signed_max+1 because previous transformations. */
12488 if (code == LE_EXPR || code == GT_EXPR)
12489 {
12490 tree st;
12491 st = signed_type_for (TREE_TYPE (arg1));
12492 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12493 type, fold_convert (st, arg0),
12494 build_int_cst (st, 0));
12495 }
12496 }
12497 }
12498 }
12499
12500 /* If we are comparing an ABS_EXPR with a constant, we can
12501 convert all the cases into explicit comparisons, but they may
12502 well not be faster than doing the ABS and one comparison.
12503 But ABS (X) <= C is a range comparison, which becomes a subtraction
12504 and a comparison, and is probably faster. */
12505 if (code == LE_EXPR
12506 && TREE_CODE (arg1) == INTEGER_CST
12507 && TREE_CODE (arg0) == ABS_EXPR
12508 && ! TREE_SIDE_EFFECTS (arg0)
12509 && (0 != (tem = negate_expr (arg1)))
12510 && TREE_CODE (tem) == INTEGER_CST
12511 && !TREE_OVERFLOW (tem))
12512 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12513 build2 (GE_EXPR, type,
12514 TREE_OPERAND (arg0, 0), tem),
12515 build2 (LE_EXPR, type,
12516 TREE_OPERAND (arg0, 0), arg1));
12517
12518 /* Convert ABS_EXPR<x> >= 0 to true. */
12519 strict_overflow_p = false;
12520 if (code == GE_EXPR
12521 && (integer_zerop (arg1)
12522 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12523 && real_zerop (arg1)))
12524 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12525 {
12526 if (strict_overflow_p)
12527 fold_overflow_warning (("assuming signed overflow does not occur "
12528 "when simplifying comparison of "
12529 "absolute value and zero"),
12530 WARN_STRICT_OVERFLOW_CONDITIONAL);
12531 return omit_one_operand (type, integer_one_node, arg0);
12532 }
12533
12534 /* Convert ABS_EXPR<x> < 0 to false. */
12535 strict_overflow_p = false;
12536 if (code == LT_EXPR
12537 && (integer_zerop (arg1) || real_zerop (arg1))
12538 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12539 {
12540 if (strict_overflow_p)
12541 fold_overflow_warning (("assuming signed overflow does not occur "
12542 "when simplifying comparison of "
12543 "absolute value and zero"),
12544 WARN_STRICT_OVERFLOW_CONDITIONAL);
12545 return omit_one_operand (type, integer_zero_node, arg0);
12546 }
12547
12548 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12549 and similarly for >= into !=. */
12550 if ((code == LT_EXPR || code == GE_EXPR)
12551 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12552 && TREE_CODE (arg1) == LSHIFT_EXPR
12553 && integer_onep (TREE_OPERAND (arg1, 0)))
12554 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12555 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12556 TREE_OPERAND (arg1, 1)),
12557 build_int_cst (TREE_TYPE (arg0), 0));
12558
12559 if ((code == LT_EXPR || code == GE_EXPR)
12560 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12561 && (TREE_CODE (arg1) == NOP_EXPR
12562 || TREE_CODE (arg1) == CONVERT_EXPR)
12563 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12564 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12565 return
12566 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12567 fold_convert (TREE_TYPE (arg0),
12568 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12569 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12570 1))),
12571 build_int_cst (TREE_TYPE (arg0), 0));
12572
12573 return NULL_TREE;
12574
12575 case UNORDERED_EXPR:
12576 case ORDERED_EXPR:
12577 case UNLT_EXPR:
12578 case UNLE_EXPR:
12579 case UNGT_EXPR:
12580 case UNGE_EXPR:
12581 case UNEQ_EXPR:
12582 case LTGT_EXPR:
12583 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12584 {
12585 t1 = fold_relational_const (code, type, arg0, arg1);
12586 if (t1 != NULL_TREE)
12587 return t1;
12588 }
12589
12590 /* If the first operand is NaN, the result is constant. */
12591 if (TREE_CODE (arg0) == REAL_CST
12592 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12593 && (code != LTGT_EXPR || ! flag_trapping_math))
12594 {
12595 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12596 ? integer_zero_node
12597 : integer_one_node;
12598 return omit_one_operand (type, t1, arg1);
12599 }
12600
12601 /* If the second operand is NaN, the result is constant. */
12602 if (TREE_CODE (arg1) == REAL_CST
12603 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12604 && (code != LTGT_EXPR || ! flag_trapping_math))
12605 {
12606 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12607 ? integer_zero_node
12608 : integer_one_node;
12609 return omit_one_operand (type, t1, arg0);
12610 }
12611
12612 /* Simplify unordered comparison of something with itself. */
12613 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12614 && operand_equal_p (arg0, arg1, 0))
12615 return constant_boolean_node (1, type);
12616
12617 if (code == LTGT_EXPR
12618 && !flag_trapping_math
12619 && operand_equal_p (arg0, arg1, 0))
12620 return constant_boolean_node (0, type);
12621
12622 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12623 {
12624 tree targ0 = strip_float_extensions (arg0);
12625 tree targ1 = strip_float_extensions (arg1);
12626 tree newtype = TREE_TYPE (targ0);
12627
12628 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12629 newtype = TREE_TYPE (targ1);
12630
12631 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12632 return fold_build2 (code, type, fold_convert (newtype, targ0),
12633 fold_convert (newtype, targ1));
12634 }
12635
12636 return NULL_TREE;
12637
12638 case COMPOUND_EXPR:
12639 /* When pedantic, a compound expression can be neither an lvalue
12640 nor an integer constant expression. */
12641 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12642 return NULL_TREE;
12643 /* Don't let (0, 0) be null pointer constant. */
12644 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12645 : fold_convert (type, arg1);
12646 return pedantic_non_lvalue (tem);
12647
12648 case COMPLEX_EXPR:
12649 if ((TREE_CODE (arg0) == REAL_CST
12650 && TREE_CODE (arg1) == REAL_CST)
12651 || (TREE_CODE (arg0) == INTEGER_CST
12652 && TREE_CODE (arg1) == INTEGER_CST))
12653 return build_complex (type, arg0, arg1);
12654 return NULL_TREE;
12655
12656 case ASSERT_EXPR:
12657 /* An ASSERT_EXPR should never be passed to fold_binary. */
12658 gcc_unreachable ();
12659
12660 default:
12661 return NULL_TREE;
12662 } /* switch (code) */
12663 }
12664
12665 /* Callback for walk_tree, looking for LABEL_EXPR.
12666 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12667 Do not check the sub-tree of GOTO_EXPR. */
12668
12669 static tree
12670 contains_label_1 (tree *tp,
12671 int *walk_subtrees,
12672 void *data ATTRIBUTE_UNUSED)
12673 {
12674 switch (TREE_CODE (*tp))
12675 {
12676 case LABEL_EXPR:
12677 return *tp;
12678 case GOTO_EXPR:
12679 *walk_subtrees = 0;
12680 /* no break */
12681 default:
12682 return NULL_TREE;
12683 }
12684 }
12685
12686 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12687 accessible from outside the sub-tree. Returns NULL_TREE if no
12688 addressable label is found. */
12689
12690 static bool
12691 contains_label_p (tree st)
12692 {
12693 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12694 }
12695
12696 /* Fold a ternary expression of code CODE and type TYPE with operands
12697 OP0, OP1, and OP2. Return the folded expression if folding is
12698 successful. Otherwise, return NULL_TREE. */
12699
12700 tree
12701 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12702 {
12703 tree tem;
12704 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12705 enum tree_code_class kind = TREE_CODE_CLASS (code);
12706
12707 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12708 && TREE_CODE_LENGTH (code) == 3);
12709
12710 /* Strip any conversions that don't change the mode. This is safe
12711 for every expression, except for a comparison expression because
12712 its signedness is derived from its operands. So, in the latter
12713 case, only strip conversions that don't change the signedness.
12714
12715 Note that this is done as an internal manipulation within the
12716 constant folder, in order to find the simplest representation of
12717 the arguments so that their form can be studied. In any cases,
12718 the appropriate type conversions should be put back in the tree
12719 that will get out of the constant folder. */
12720 if (op0)
12721 {
12722 arg0 = op0;
12723 STRIP_NOPS (arg0);
12724 }
12725
12726 if (op1)
12727 {
12728 arg1 = op1;
12729 STRIP_NOPS (arg1);
12730 }
12731
12732 switch (code)
12733 {
12734 case COMPONENT_REF:
12735 if (TREE_CODE (arg0) == CONSTRUCTOR
12736 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12737 {
12738 unsigned HOST_WIDE_INT idx;
12739 tree field, value;
12740 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12741 if (field == arg1)
12742 return value;
12743 }
12744 return NULL_TREE;
12745
12746 case COND_EXPR:
12747 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12748 so all simple results must be passed through pedantic_non_lvalue. */
12749 if (TREE_CODE (arg0) == INTEGER_CST)
12750 {
12751 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12752 tem = integer_zerop (arg0) ? op2 : op1;
12753 /* Only optimize constant conditions when the selected branch
12754 has the same type as the COND_EXPR. This avoids optimizing
12755 away "c ? x : throw", where the throw has a void type.
12756 Avoid throwing away that operand which contains label. */
12757 if ((!TREE_SIDE_EFFECTS (unused_op)
12758 || !contains_label_p (unused_op))
12759 && (! VOID_TYPE_P (TREE_TYPE (tem))
12760 || VOID_TYPE_P (type)))
12761 return pedantic_non_lvalue (tem);
12762 return NULL_TREE;
12763 }
12764 if (operand_equal_p (arg1, op2, 0))
12765 return pedantic_omit_one_operand (type, arg1, arg0);
12766
12767 /* If we have A op B ? A : C, we may be able to convert this to a
12768 simpler expression, depending on the operation and the values
12769 of B and C. Signed zeros prevent all of these transformations,
12770 for reasons given above each one.
12771
12772 Also try swapping the arguments and inverting the conditional. */
12773 if (COMPARISON_CLASS_P (arg0)
12774 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12775 arg1, TREE_OPERAND (arg0, 1))
12776 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12777 {
12778 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12779 if (tem)
12780 return tem;
12781 }
12782
12783 if (COMPARISON_CLASS_P (arg0)
12784 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12785 op2,
12786 TREE_OPERAND (arg0, 1))
12787 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12788 {
12789 tem = fold_truth_not_expr (arg0);
12790 if (tem && COMPARISON_CLASS_P (tem))
12791 {
12792 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12793 if (tem)
12794 return tem;
12795 }
12796 }
12797
12798 /* If the second operand is simpler than the third, swap them
12799 since that produces better jump optimization results. */
12800 if (truth_value_p (TREE_CODE (arg0))
12801 && tree_swap_operands_p (op1, op2, false))
12802 {
12803 /* See if this can be inverted. If it can't, possibly because
12804 it was a floating-point inequality comparison, don't do
12805 anything. */
12806 tem = fold_truth_not_expr (arg0);
12807 if (tem)
12808 return fold_build3 (code, type, tem, op2, op1);
12809 }
12810
12811 /* Convert A ? 1 : 0 to simply A. */
12812 if (integer_onep (op1)
12813 && integer_zerop (op2)
12814 /* If we try to convert OP0 to our type, the
12815 call to fold will try to move the conversion inside
12816 a COND, which will recurse. In that case, the COND_EXPR
12817 is probably the best choice, so leave it alone. */
12818 && type == TREE_TYPE (arg0))
12819 return pedantic_non_lvalue (arg0);
12820
12821 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12822 over COND_EXPR in cases such as floating point comparisons. */
12823 if (integer_zerop (op1)
12824 && integer_onep (op2)
12825 && truth_value_p (TREE_CODE (arg0)))
12826 return pedantic_non_lvalue (fold_convert (type,
12827 invert_truthvalue (arg0)));
12828
12829 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12830 if (TREE_CODE (arg0) == LT_EXPR
12831 && integer_zerop (TREE_OPERAND (arg0, 1))
12832 && integer_zerop (op2)
12833 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12834 {
12835 /* sign_bit_p only checks ARG1 bits within A's precision.
12836 If <sign bit of A> has wider type than A, bits outside
12837 of A's precision in <sign bit of A> need to be checked.
12838 If they are all 0, this optimization needs to be done
12839 in unsigned A's type, if they are all 1 in signed A's type,
12840 otherwise this can't be done. */
12841 if (TYPE_PRECISION (TREE_TYPE (tem))
12842 < TYPE_PRECISION (TREE_TYPE (arg1))
12843 && TYPE_PRECISION (TREE_TYPE (tem))
12844 < TYPE_PRECISION (type))
12845 {
12846 unsigned HOST_WIDE_INT mask_lo;
12847 HOST_WIDE_INT mask_hi;
12848 int inner_width, outer_width;
12849 tree tem_type;
12850
12851 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12852 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12853 if (outer_width > TYPE_PRECISION (type))
12854 outer_width = TYPE_PRECISION (type);
12855
12856 if (outer_width > HOST_BITS_PER_WIDE_INT)
12857 {
12858 mask_hi = ((unsigned HOST_WIDE_INT) -1
12859 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12860 mask_lo = -1;
12861 }
12862 else
12863 {
12864 mask_hi = 0;
12865 mask_lo = ((unsigned HOST_WIDE_INT) -1
12866 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12867 }
12868 if (inner_width > HOST_BITS_PER_WIDE_INT)
12869 {
12870 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12871 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12872 mask_lo = 0;
12873 }
12874 else
12875 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12876 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12877
12878 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12879 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12880 {
12881 tem_type = signed_type_for (TREE_TYPE (tem));
12882 tem = fold_convert (tem_type, tem);
12883 }
12884 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12885 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12886 {
12887 tem_type = unsigned_type_for (TREE_TYPE (tem));
12888 tem = fold_convert (tem_type, tem);
12889 }
12890 else
12891 tem = NULL;
12892 }
12893
12894 if (tem)
12895 return fold_convert (type,
12896 fold_build2 (BIT_AND_EXPR,
12897 TREE_TYPE (tem), tem,
12898 fold_convert (TREE_TYPE (tem),
12899 arg1)));
12900 }
12901
12902 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12903 already handled above. */
12904 if (TREE_CODE (arg0) == BIT_AND_EXPR
12905 && integer_onep (TREE_OPERAND (arg0, 1))
12906 && integer_zerop (op2)
12907 && integer_pow2p (arg1))
12908 {
12909 tree tem = TREE_OPERAND (arg0, 0);
12910 STRIP_NOPS (tem);
12911 if (TREE_CODE (tem) == RSHIFT_EXPR
12912 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12913 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12914 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12915 return fold_build2 (BIT_AND_EXPR, type,
12916 TREE_OPERAND (tem, 0), arg1);
12917 }
12918
12919 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12920 is probably obsolete because the first operand should be a
12921 truth value (that's why we have the two cases above), but let's
12922 leave it in until we can confirm this for all front-ends. */
12923 if (integer_zerop (op2)
12924 && TREE_CODE (arg0) == NE_EXPR
12925 && integer_zerop (TREE_OPERAND (arg0, 1))
12926 && integer_pow2p (arg1)
12927 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12928 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12929 arg1, OEP_ONLY_CONST))
12930 return pedantic_non_lvalue (fold_convert (type,
12931 TREE_OPERAND (arg0, 0)));
12932
12933 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12934 if (integer_zerop (op2)
12935 && truth_value_p (TREE_CODE (arg0))
12936 && truth_value_p (TREE_CODE (arg1)))
12937 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12938 fold_convert (type, arg0),
12939 arg1);
12940
12941 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12942 if (integer_onep (op2)
12943 && truth_value_p (TREE_CODE (arg0))
12944 && truth_value_p (TREE_CODE (arg1)))
12945 {
12946 /* Only perform transformation if ARG0 is easily inverted. */
12947 tem = fold_truth_not_expr (arg0);
12948 if (tem)
12949 return fold_build2 (TRUTH_ORIF_EXPR, type,
12950 fold_convert (type, tem),
12951 arg1);
12952 }
12953
12954 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12955 if (integer_zerop (arg1)
12956 && truth_value_p (TREE_CODE (arg0))
12957 && truth_value_p (TREE_CODE (op2)))
12958 {
12959 /* Only perform transformation if ARG0 is easily inverted. */
12960 tem = fold_truth_not_expr (arg0);
12961 if (tem)
12962 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12963 fold_convert (type, tem),
12964 op2);
12965 }
12966
12967 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12968 if (integer_onep (arg1)
12969 && truth_value_p (TREE_CODE (arg0))
12970 && truth_value_p (TREE_CODE (op2)))
12971 return fold_build2 (TRUTH_ORIF_EXPR, type,
12972 fold_convert (type, arg0),
12973 op2);
12974
12975 return NULL_TREE;
12976
12977 case CALL_EXPR:
12978 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12979 of fold_ternary on them. */
12980 gcc_unreachable ();
12981
12982 case BIT_FIELD_REF:
12983 if ((TREE_CODE (arg0) == VECTOR_CST
12984 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
12985 && type == TREE_TYPE (TREE_TYPE (arg0))
12986 && host_integerp (arg1, 1)
12987 && host_integerp (op2, 1))
12988 {
12989 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12990 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12991
12992 if (width != 0
12993 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12994 && (idx % width) == 0
12995 && (idx = idx / width)
12996 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12997 {
12998 tree elements = NULL_TREE;
12999
13000 if (TREE_CODE (arg0) == VECTOR_CST)
13001 elements = TREE_VECTOR_CST_ELTS (arg0);
13002 else
13003 {
13004 unsigned HOST_WIDE_INT idx;
13005 tree value;
13006
13007 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13008 elements = tree_cons (NULL_TREE, value, elements);
13009 }
13010 while (idx-- > 0 && elements)
13011 elements = TREE_CHAIN (elements);
13012 if (elements)
13013 return TREE_VALUE (elements);
13014 else
13015 return fold_convert (type, integer_zero_node);
13016 }
13017 }
13018 return NULL_TREE;
13019
13020 default:
13021 return NULL_TREE;
13022 } /* switch (code) */
13023 }
13024
13025 /* Perform constant folding and related simplification of EXPR.
13026 The related simplifications include x*1 => x, x*0 => 0, etc.,
13027 and application of the associative law.
13028 NOP_EXPR conversions may be removed freely (as long as we
13029 are careful not to change the type of the overall expression).
13030 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13031 but we can constant-fold them if they have constant operands. */
13032
13033 #ifdef ENABLE_FOLD_CHECKING
13034 # define fold(x) fold_1 (x)
13035 static tree fold_1 (tree);
13036 static
13037 #endif
13038 tree
13039 fold (tree expr)
13040 {
13041 const tree t = expr;
13042 enum tree_code code = TREE_CODE (t);
13043 enum tree_code_class kind = TREE_CODE_CLASS (code);
13044 tree tem;
13045
13046 /* Return right away if a constant. */
13047 if (kind == tcc_constant)
13048 return t;
13049
13050 /* CALL_EXPR-like objects with variable numbers of operands are
13051 treated specially. */
13052 if (kind == tcc_vl_exp)
13053 {
13054 if (code == CALL_EXPR)
13055 {
13056 tem = fold_call_expr (expr, false);
13057 return tem ? tem : expr;
13058 }
13059 return expr;
13060 }
13061
13062 if (IS_EXPR_CODE_CLASS (kind)
13063 || IS_GIMPLE_STMT_CODE_CLASS (kind))
13064 {
13065 tree type = TREE_TYPE (t);
13066 tree op0, op1, op2;
13067
13068 switch (TREE_CODE_LENGTH (code))
13069 {
13070 case 1:
13071 op0 = TREE_OPERAND (t, 0);
13072 tem = fold_unary (code, type, op0);
13073 return tem ? tem : expr;
13074 case 2:
13075 op0 = TREE_OPERAND (t, 0);
13076 op1 = TREE_OPERAND (t, 1);
13077 tem = fold_binary (code, type, op0, op1);
13078 return tem ? tem : expr;
13079 case 3:
13080 op0 = TREE_OPERAND (t, 0);
13081 op1 = TREE_OPERAND (t, 1);
13082 op2 = TREE_OPERAND (t, 2);
13083 tem = fold_ternary (code, type, op0, op1, op2);
13084 return tem ? tem : expr;
13085 default:
13086 break;
13087 }
13088 }
13089
13090 switch (code)
13091 {
13092 case CONST_DECL:
13093 return fold (DECL_INITIAL (t));
13094
13095 default:
13096 return t;
13097 } /* switch (code) */
13098 }
13099
13100 #ifdef ENABLE_FOLD_CHECKING
13101 #undef fold
13102
13103 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
13104 static void fold_check_failed (tree, tree);
13105 void print_fold_checksum (tree);
13106
13107 /* When --enable-checking=fold, compute a digest of expr before
13108 and after actual fold call to see if fold did not accidentally
13109 change original expr. */
13110
13111 tree
13112 fold (tree expr)
13113 {
13114 tree ret;
13115 struct md5_ctx ctx;
13116 unsigned char checksum_before[16], checksum_after[16];
13117 htab_t ht;
13118
13119 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13120 md5_init_ctx (&ctx);
13121 fold_checksum_tree (expr, &ctx, ht);
13122 md5_finish_ctx (&ctx, checksum_before);
13123 htab_empty (ht);
13124
13125 ret = fold_1 (expr);
13126
13127 md5_init_ctx (&ctx);
13128 fold_checksum_tree (expr, &ctx, ht);
13129 md5_finish_ctx (&ctx, checksum_after);
13130 htab_delete (ht);
13131
13132 if (memcmp (checksum_before, checksum_after, 16))
13133 fold_check_failed (expr, ret);
13134
13135 return ret;
13136 }
13137
13138 void
13139 print_fold_checksum (tree expr)
13140 {
13141 struct md5_ctx ctx;
13142 unsigned char checksum[16], cnt;
13143 htab_t ht;
13144
13145 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13146 md5_init_ctx (&ctx);
13147 fold_checksum_tree (expr, &ctx, ht);
13148 md5_finish_ctx (&ctx, checksum);
13149 htab_delete (ht);
13150 for (cnt = 0; cnt < 16; ++cnt)
13151 fprintf (stderr, "%02x", checksum[cnt]);
13152 putc ('\n', stderr);
13153 }
13154
13155 static void
13156 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
13157 {
13158 internal_error ("fold check: original tree changed by fold");
13159 }
13160
13161 static void
13162 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
13163 {
13164 void **slot;
13165 enum tree_code code;
13166 struct tree_function_decl buf;
13167 int i, len;
13168
13169 recursive_label:
13170
13171 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13172 <= sizeof (struct tree_function_decl))
13173 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13174 if (expr == NULL)
13175 return;
13176 slot = htab_find_slot (ht, expr, INSERT);
13177 if (*slot != NULL)
13178 return;
13179 *slot = expr;
13180 code = TREE_CODE (expr);
13181 if (TREE_CODE_CLASS (code) == tcc_declaration
13182 && DECL_ASSEMBLER_NAME_SET_P (expr))
13183 {
13184 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13185 memcpy ((char *) &buf, expr, tree_size (expr));
13186 expr = (tree) &buf;
13187 SET_DECL_ASSEMBLER_NAME (expr, NULL);
13188 }
13189 else if (TREE_CODE_CLASS (code) == tcc_type
13190 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13191 || TYPE_CACHED_VALUES_P (expr)
13192 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13193 {
13194 /* Allow these fields to be modified. */
13195 memcpy ((char *) &buf, expr, tree_size (expr));
13196 expr = (tree) &buf;
13197 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
13198 TYPE_POINTER_TO (expr) = NULL;
13199 TYPE_REFERENCE_TO (expr) = NULL;
13200 if (TYPE_CACHED_VALUES_P (expr))
13201 {
13202 TYPE_CACHED_VALUES_P (expr) = 0;
13203 TYPE_CACHED_VALUES (expr) = NULL;
13204 }
13205 }
13206 md5_process_bytes (expr, tree_size (expr), ctx);
13207 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13208 if (TREE_CODE_CLASS (code) != tcc_type
13209 && TREE_CODE_CLASS (code) != tcc_declaration
13210 && code != TREE_LIST
13211 && code != SSA_NAME)
13212 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13213 switch (TREE_CODE_CLASS (code))
13214 {
13215 case tcc_constant:
13216 switch (code)
13217 {
13218 case STRING_CST:
13219 md5_process_bytes (TREE_STRING_POINTER (expr),
13220 TREE_STRING_LENGTH (expr), ctx);
13221 break;
13222 case COMPLEX_CST:
13223 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13224 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13225 break;
13226 case VECTOR_CST:
13227 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13228 break;
13229 default:
13230 break;
13231 }
13232 break;
13233 case tcc_exceptional:
13234 switch (code)
13235 {
13236 case TREE_LIST:
13237 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13238 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13239 expr = TREE_CHAIN (expr);
13240 goto recursive_label;
13241 break;
13242 case TREE_VEC:
13243 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13244 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13245 break;
13246 default:
13247 break;
13248 }
13249 break;
13250 case tcc_expression:
13251 case tcc_reference:
13252 case tcc_comparison:
13253 case tcc_unary:
13254 case tcc_binary:
13255 case tcc_statement:
13256 case tcc_vl_exp:
13257 len = TREE_OPERAND_LENGTH (expr);
13258 for (i = 0; i < len; ++i)
13259 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13260 break;
13261 case tcc_declaration:
13262 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13263 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13264 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13265 {
13266 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13267 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13268 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13269 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13270 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13271 }
13272 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13273 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13274
13275 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13276 {
13277 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13278 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13279 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13280 }
13281 break;
13282 case tcc_type:
13283 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13284 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13285 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13286 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13287 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13288 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13289 if (INTEGRAL_TYPE_P (expr)
13290 || SCALAR_FLOAT_TYPE_P (expr))
13291 {
13292 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13293 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13294 }
13295 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13296 if (TREE_CODE (expr) == RECORD_TYPE
13297 || TREE_CODE (expr) == UNION_TYPE
13298 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13299 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13300 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13301 break;
13302 default:
13303 break;
13304 }
13305 }
13306
13307 /* Helper function for outputting the checksum of a tree T. When
13308 debugging with gdb, you can "define mynext" to be "next" followed
13309 by "call debug_fold_checksum (op0)", then just trace down till the
13310 outputs differ. */
13311
13312 void
13313 debug_fold_checksum (tree t)
13314 {
13315 int i;
13316 unsigned char checksum[16];
13317 struct md5_ctx ctx;
13318 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13319
13320 md5_init_ctx (&ctx);
13321 fold_checksum_tree (t, &ctx, ht);
13322 md5_finish_ctx (&ctx, checksum);
13323 htab_empty (ht);
13324
13325 for (i = 0; i < 16; i++)
13326 fprintf (stderr, "%d ", checksum[i]);
13327
13328 fprintf (stderr, "\n");
13329 }
13330
13331 #endif
13332
13333 /* Fold a unary tree expression with code CODE of type TYPE with an
13334 operand OP0. Return a folded expression if successful. Otherwise,
13335 return a tree expression with code CODE of type TYPE with an
13336 operand OP0. */
13337
13338 tree
13339 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13340 {
13341 tree tem;
13342 #ifdef ENABLE_FOLD_CHECKING
13343 unsigned char checksum_before[16], checksum_after[16];
13344 struct md5_ctx ctx;
13345 htab_t ht;
13346
13347 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13348 md5_init_ctx (&ctx);
13349 fold_checksum_tree (op0, &ctx, ht);
13350 md5_finish_ctx (&ctx, checksum_before);
13351 htab_empty (ht);
13352 #endif
13353
13354 tem = fold_unary (code, type, op0);
13355 if (!tem)
13356 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13357
13358 #ifdef ENABLE_FOLD_CHECKING
13359 md5_init_ctx (&ctx);
13360 fold_checksum_tree (op0, &ctx, ht);
13361 md5_finish_ctx (&ctx, checksum_after);
13362 htab_delete (ht);
13363
13364 if (memcmp (checksum_before, checksum_after, 16))
13365 fold_check_failed (op0, tem);
13366 #endif
13367 return tem;
13368 }
13369
13370 /* Fold a binary tree expression with code CODE of type TYPE with
13371 operands OP0 and OP1. Return a folded expression if successful.
13372 Otherwise, return a tree expression with code CODE of type TYPE
13373 with operands OP0 and OP1. */
13374
13375 tree
13376 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13377 MEM_STAT_DECL)
13378 {
13379 tree tem;
13380 #ifdef ENABLE_FOLD_CHECKING
13381 unsigned char checksum_before_op0[16],
13382 checksum_before_op1[16],
13383 checksum_after_op0[16],
13384 checksum_after_op1[16];
13385 struct md5_ctx ctx;
13386 htab_t ht;
13387
13388 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13389 md5_init_ctx (&ctx);
13390 fold_checksum_tree (op0, &ctx, ht);
13391 md5_finish_ctx (&ctx, checksum_before_op0);
13392 htab_empty (ht);
13393
13394 md5_init_ctx (&ctx);
13395 fold_checksum_tree (op1, &ctx, ht);
13396 md5_finish_ctx (&ctx, checksum_before_op1);
13397 htab_empty (ht);
13398 #endif
13399
13400 tem = fold_binary (code, type, op0, op1);
13401 if (!tem)
13402 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13403
13404 #ifdef ENABLE_FOLD_CHECKING
13405 md5_init_ctx (&ctx);
13406 fold_checksum_tree (op0, &ctx, ht);
13407 md5_finish_ctx (&ctx, checksum_after_op0);
13408 htab_empty (ht);
13409
13410 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13411 fold_check_failed (op0, tem);
13412
13413 md5_init_ctx (&ctx);
13414 fold_checksum_tree (op1, &ctx, ht);
13415 md5_finish_ctx (&ctx, checksum_after_op1);
13416 htab_delete (ht);
13417
13418 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13419 fold_check_failed (op1, tem);
13420 #endif
13421 return tem;
13422 }
13423
13424 /* Fold a ternary tree expression with code CODE of type TYPE with
13425 operands OP0, OP1, and OP2. Return a folded expression if
13426 successful. Otherwise, return a tree expression with code CODE of
13427 type TYPE with operands OP0, OP1, and OP2. */
13428
13429 tree
13430 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13431 MEM_STAT_DECL)
13432 {
13433 tree tem;
13434 #ifdef ENABLE_FOLD_CHECKING
13435 unsigned char checksum_before_op0[16],
13436 checksum_before_op1[16],
13437 checksum_before_op2[16],
13438 checksum_after_op0[16],
13439 checksum_after_op1[16],
13440 checksum_after_op2[16];
13441 struct md5_ctx ctx;
13442 htab_t ht;
13443
13444 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13445 md5_init_ctx (&ctx);
13446 fold_checksum_tree (op0, &ctx, ht);
13447 md5_finish_ctx (&ctx, checksum_before_op0);
13448 htab_empty (ht);
13449
13450 md5_init_ctx (&ctx);
13451 fold_checksum_tree (op1, &ctx, ht);
13452 md5_finish_ctx (&ctx, checksum_before_op1);
13453 htab_empty (ht);
13454
13455 md5_init_ctx (&ctx);
13456 fold_checksum_tree (op2, &ctx, ht);
13457 md5_finish_ctx (&ctx, checksum_before_op2);
13458 htab_empty (ht);
13459 #endif
13460
13461 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13462 tem = fold_ternary (code, type, op0, op1, op2);
13463 if (!tem)
13464 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13465
13466 #ifdef ENABLE_FOLD_CHECKING
13467 md5_init_ctx (&ctx);
13468 fold_checksum_tree (op0, &ctx, ht);
13469 md5_finish_ctx (&ctx, checksum_after_op0);
13470 htab_empty (ht);
13471
13472 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13473 fold_check_failed (op0, tem);
13474
13475 md5_init_ctx (&ctx);
13476 fold_checksum_tree (op1, &ctx, ht);
13477 md5_finish_ctx (&ctx, checksum_after_op1);
13478 htab_empty (ht);
13479
13480 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13481 fold_check_failed (op1, tem);
13482
13483 md5_init_ctx (&ctx);
13484 fold_checksum_tree (op2, &ctx, ht);
13485 md5_finish_ctx (&ctx, checksum_after_op2);
13486 htab_delete (ht);
13487
13488 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13489 fold_check_failed (op2, tem);
13490 #endif
13491 return tem;
13492 }
13493
13494 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13495 arguments in ARGARRAY, and a null static chain.
13496 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13497 of type TYPE from the given operands as constructed by build_call_array. */
13498
13499 tree
13500 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13501 {
13502 tree tem;
13503 #ifdef ENABLE_FOLD_CHECKING
13504 unsigned char checksum_before_fn[16],
13505 checksum_before_arglist[16],
13506 checksum_after_fn[16],
13507 checksum_after_arglist[16];
13508 struct md5_ctx ctx;
13509 htab_t ht;
13510 int i;
13511
13512 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13513 md5_init_ctx (&ctx);
13514 fold_checksum_tree (fn, &ctx, ht);
13515 md5_finish_ctx (&ctx, checksum_before_fn);
13516 htab_empty (ht);
13517
13518 md5_init_ctx (&ctx);
13519 for (i = 0; i < nargs; i++)
13520 fold_checksum_tree (argarray[i], &ctx, ht);
13521 md5_finish_ctx (&ctx, checksum_before_arglist);
13522 htab_empty (ht);
13523 #endif
13524
13525 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13526
13527 #ifdef ENABLE_FOLD_CHECKING
13528 md5_init_ctx (&ctx);
13529 fold_checksum_tree (fn, &ctx, ht);
13530 md5_finish_ctx (&ctx, checksum_after_fn);
13531 htab_empty (ht);
13532
13533 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13534 fold_check_failed (fn, tem);
13535
13536 md5_init_ctx (&ctx);
13537 for (i = 0; i < nargs; i++)
13538 fold_checksum_tree (argarray[i], &ctx, ht);
13539 md5_finish_ctx (&ctx, checksum_after_arglist);
13540 htab_delete (ht);
13541
13542 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13543 fold_check_failed (NULL_TREE, tem);
13544 #endif
13545 return tem;
13546 }
13547
13548 /* Perform constant folding and related simplification of initializer
13549 expression EXPR. These behave identically to "fold_buildN" but ignore
13550 potential run-time traps and exceptions that fold must preserve. */
13551
13552 #define START_FOLD_INIT \
13553 int saved_signaling_nans = flag_signaling_nans;\
13554 int saved_trapping_math = flag_trapping_math;\
13555 int saved_rounding_math = flag_rounding_math;\
13556 int saved_trapv = flag_trapv;\
13557 int saved_folding_initializer = folding_initializer;\
13558 flag_signaling_nans = 0;\
13559 flag_trapping_math = 0;\
13560 flag_rounding_math = 0;\
13561 flag_trapv = 0;\
13562 folding_initializer = 1;
13563
13564 #define END_FOLD_INIT \
13565 flag_signaling_nans = saved_signaling_nans;\
13566 flag_trapping_math = saved_trapping_math;\
13567 flag_rounding_math = saved_rounding_math;\
13568 flag_trapv = saved_trapv;\
13569 folding_initializer = saved_folding_initializer;
13570
13571 tree
13572 fold_build1_initializer (enum tree_code code, tree type, tree op)
13573 {
13574 tree result;
13575 START_FOLD_INIT;
13576
13577 result = fold_build1 (code, type, op);
13578
13579 END_FOLD_INIT;
13580 return result;
13581 }
13582
13583 tree
13584 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13585 {
13586 tree result;
13587 START_FOLD_INIT;
13588
13589 result = fold_build2 (code, type, op0, op1);
13590
13591 END_FOLD_INIT;
13592 return result;
13593 }
13594
13595 tree
13596 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13597 tree op2)
13598 {
13599 tree result;
13600 START_FOLD_INIT;
13601
13602 result = fold_build3 (code, type, op0, op1, op2);
13603
13604 END_FOLD_INIT;
13605 return result;
13606 }
13607
13608 tree
13609 fold_build_call_array_initializer (tree type, tree fn,
13610 int nargs, tree *argarray)
13611 {
13612 tree result;
13613 START_FOLD_INIT;
13614
13615 result = fold_build_call_array (type, fn, nargs, argarray);
13616
13617 END_FOLD_INIT;
13618 return result;
13619 }
13620
13621 #undef START_FOLD_INIT
13622 #undef END_FOLD_INIT
13623
13624 /* Determine if first argument is a multiple of second argument. Return 0 if
13625 it is not, or we cannot easily determined it to be.
13626
13627 An example of the sort of thing we care about (at this point; this routine
13628 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13629 fold cases do now) is discovering that
13630
13631 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13632
13633 is a multiple of
13634
13635 SAVE_EXPR (J * 8)
13636
13637 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13638
13639 This code also handles discovering that
13640
13641 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13642
13643 is a multiple of 8 so we don't have to worry about dealing with a
13644 possible remainder.
13645
13646 Note that we *look* inside a SAVE_EXPR only to determine how it was
13647 calculated; it is not safe for fold to do much of anything else with the
13648 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13649 at run time. For example, the latter example above *cannot* be implemented
13650 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13651 evaluation time of the original SAVE_EXPR is not necessarily the same at
13652 the time the new expression is evaluated. The only optimization of this
13653 sort that would be valid is changing
13654
13655 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13656
13657 divided by 8 to
13658
13659 SAVE_EXPR (I) * SAVE_EXPR (J)
13660
13661 (where the same SAVE_EXPR (J) is used in the original and the
13662 transformed version). */
13663
13664 int
13665 multiple_of_p (tree type, tree top, tree bottom)
13666 {
13667 if (operand_equal_p (top, bottom, 0))
13668 return 1;
13669
13670 if (TREE_CODE (type) != INTEGER_TYPE)
13671 return 0;
13672
13673 switch (TREE_CODE (top))
13674 {
13675 case BIT_AND_EXPR:
13676 /* Bitwise and provides a power of two multiple. If the mask is
13677 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13678 if (!integer_pow2p (bottom))
13679 return 0;
13680 /* FALLTHRU */
13681
13682 case MULT_EXPR:
13683 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13684 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13685
13686 case PLUS_EXPR:
13687 case MINUS_EXPR:
13688 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13689 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13690
13691 case LSHIFT_EXPR:
13692 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13693 {
13694 tree op1, t1;
13695
13696 op1 = TREE_OPERAND (top, 1);
13697 /* const_binop may not detect overflow correctly,
13698 so check for it explicitly here. */
13699 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13700 > TREE_INT_CST_LOW (op1)
13701 && TREE_INT_CST_HIGH (op1) == 0
13702 && 0 != (t1 = fold_convert (type,
13703 const_binop (LSHIFT_EXPR,
13704 size_one_node,
13705 op1, 0)))
13706 && !TREE_OVERFLOW (t1))
13707 return multiple_of_p (type, t1, bottom);
13708 }
13709 return 0;
13710
13711 case NOP_EXPR:
13712 /* Can't handle conversions from non-integral or wider integral type. */
13713 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13714 || (TYPE_PRECISION (type)
13715 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13716 return 0;
13717
13718 /* .. fall through ... */
13719
13720 case SAVE_EXPR:
13721 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13722
13723 case INTEGER_CST:
13724 if (TREE_CODE (bottom) != INTEGER_CST
13725 || integer_zerop (bottom)
13726 || (TYPE_UNSIGNED (type)
13727 && (tree_int_cst_sgn (top) < 0
13728 || tree_int_cst_sgn (bottom) < 0)))
13729 return 0;
13730 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13731 top, bottom, 0));
13732
13733 default:
13734 return 0;
13735 }
13736 }
13737
13738 /* Return true if `t' is known to be non-negative. If the return
13739 value is based on the assumption that signed overflow is undefined,
13740 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13741 *STRICT_OVERFLOW_P. */
13742
13743 bool
13744 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13745 {
13746 if (t == error_mark_node)
13747 return false;
13748
13749 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13750 return true;
13751
13752 switch (TREE_CODE (t))
13753 {
13754 case SSA_NAME:
13755 /* Query VRP to see if it has recorded any information about
13756 the range of this object. */
13757 return ssa_name_nonnegative_p (t);
13758
13759 case ABS_EXPR:
13760 /* We can't return 1 if flag_wrapv is set because
13761 ABS_EXPR<INT_MIN> = INT_MIN. */
13762 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13763 return true;
13764 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13765 {
13766 *strict_overflow_p = true;
13767 return true;
13768 }
13769 break;
13770
13771 case INTEGER_CST:
13772 return tree_int_cst_sgn (t) >= 0;
13773
13774 case REAL_CST:
13775 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13776
13777 case FIXED_CST:
13778 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13779
13780 case POINTER_PLUS_EXPR:
13781 case PLUS_EXPR:
13782 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13783 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13784 strict_overflow_p)
13785 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13786 strict_overflow_p));
13787
13788 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13789 both unsigned and at least 2 bits shorter than the result. */
13790 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13791 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13792 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13793 {
13794 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13795 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13796 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13797 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13798 {
13799 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13800 TYPE_PRECISION (inner2)) + 1;
13801 return prec < TYPE_PRECISION (TREE_TYPE (t));
13802 }
13803 }
13804 break;
13805
13806 case MULT_EXPR:
13807 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13808 {
13809 /* x * x for floating point x is always non-negative. */
13810 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13811 return true;
13812 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13813 strict_overflow_p)
13814 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13815 strict_overflow_p));
13816 }
13817
13818 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13819 both unsigned and their total bits is shorter than the result. */
13820 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13821 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13822 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13823 {
13824 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13825 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13826 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13827 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13828 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13829 < TYPE_PRECISION (TREE_TYPE (t));
13830 }
13831 return false;
13832
13833 case BIT_AND_EXPR:
13834 case MAX_EXPR:
13835 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13836 strict_overflow_p)
13837 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13838 strict_overflow_p));
13839
13840 case BIT_IOR_EXPR:
13841 case BIT_XOR_EXPR:
13842 case MIN_EXPR:
13843 case RDIV_EXPR:
13844 case TRUNC_DIV_EXPR:
13845 case CEIL_DIV_EXPR:
13846 case FLOOR_DIV_EXPR:
13847 case ROUND_DIV_EXPR:
13848 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13849 strict_overflow_p)
13850 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13851 strict_overflow_p));
13852
13853 case TRUNC_MOD_EXPR:
13854 case CEIL_MOD_EXPR:
13855 case FLOOR_MOD_EXPR:
13856 case ROUND_MOD_EXPR:
13857 case SAVE_EXPR:
13858 case NON_LVALUE_EXPR:
13859 case FLOAT_EXPR:
13860 case FIX_TRUNC_EXPR:
13861 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13862 strict_overflow_p);
13863
13864 case COMPOUND_EXPR:
13865 case MODIFY_EXPR:
13866 case GIMPLE_MODIFY_STMT:
13867 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13868 strict_overflow_p);
13869
13870 case BIND_EXPR:
13871 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13872 strict_overflow_p);
13873
13874 case COND_EXPR:
13875 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13876 strict_overflow_p)
13877 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13878 strict_overflow_p));
13879
13880 case NOP_EXPR:
13881 {
13882 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13883 tree outer_type = TREE_TYPE (t);
13884
13885 if (TREE_CODE (outer_type) == REAL_TYPE)
13886 {
13887 if (TREE_CODE (inner_type) == REAL_TYPE)
13888 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13889 strict_overflow_p);
13890 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13891 {
13892 if (TYPE_UNSIGNED (inner_type))
13893 return true;
13894 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13895 strict_overflow_p);
13896 }
13897 }
13898 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13899 {
13900 if (TREE_CODE (inner_type) == REAL_TYPE)
13901 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
13902 strict_overflow_p);
13903 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13904 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13905 && TYPE_UNSIGNED (inner_type);
13906 }
13907 }
13908 break;
13909
13910 case TARGET_EXPR:
13911 {
13912 tree temp = TARGET_EXPR_SLOT (t);
13913 t = TARGET_EXPR_INITIAL (t);
13914
13915 /* If the initializer is non-void, then it's a normal expression
13916 that will be assigned to the slot. */
13917 if (!VOID_TYPE_P (t))
13918 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13919
13920 /* Otherwise, the initializer sets the slot in some way. One common
13921 way is an assignment statement at the end of the initializer. */
13922 while (1)
13923 {
13924 if (TREE_CODE (t) == BIND_EXPR)
13925 t = expr_last (BIND_EXPR_BODY (t));
13926 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13927 || TREE_CODE (t) == TRY_CATCH_EXPR)
13928 t = expr_last (TREE_OPERAND (t, 0));
13929 else if (TREE_CODE (t) == STATEMENT_LIST)
13930 t = expr_last (t);
13931 else
13932 break;
13933 }
13934 if ((TREE_CODE (t) == MODIFY_EXPR
13935 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13936 && GENERIC_TREE_OPERAND (t, 0) == temp)
13937 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13938 strict_overflow_p);
13939
13940 return false;
13941 }
13942
13943 case CALL_EXPR:
13944 {
13945 tree fndecl = get_callee_fndecl (t);
13946 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13947 switch (DECL_FUNCTION_CODE (fndecl))
13948 {
13949 CASE_FLT_FN (BUILT_IN_ACOS):
13950 CASE_FLT_FN (BUILT_IN_ACOSH):
13951 CASE_FLT_FN (BUILT_IN_CABS):
13952 CASE_FLT_FN (BUILT_IN_COSH):
13953 CASE_FLT_FN (BUILT_IN_ERFC):
13954 CASE_FLT_FN (BUILT_IN_EXP):
13955 CASE_FLT_FN (BUILT_IN_EXP10):
13956 CASE_FLT_FN (BUILT_IN_EXP2):
13957 CASE_FLT_FN (BUILT_IN_FABS):
13958 CASE_FLT_FN (BUILT_IN_FDIM):
13959 CASE_FLT_FN (BUILT_IN_HYPOT):
13960 CASE_FLT_FN (BUILT_IN_POW10):
13961 CASE_INT_FN (BUILT_IN_FFS):
13962 CASE_INT_FN (BUILT_IN_PARITY):
13963 CASE_INT_FN (BUILT_IN_POPCOUNT):
13964 case BUILT_IN_BSWAP32:
13965 case BUILT_IN_BSWAP64:
13966 /* Always true. */
13967 return true;
13968
13969 CASE_FLT_FN (BUILT_IN_SQRT):
13970 /* sqrt(-0.0) is -0.0. */
13971 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
13972 return true;
13973 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13974 strict_overflow_p);
13975
13976 CASE_FLT_FN (BUILT_IN_ASINH):
13977 CASE_FLT_FN (BUILT_IN_ATAN):
13978 CASE_FLT_FN (BUILT_IN_ATANH):
13979 CASE_FLT_FN (BUILT_IN_CBRT):
13980 CASE_FLT_FN (BUILT_IN_CEIL):
13981 CASE_FLT_FN (BUILT_IN_ERF):
13982 CASE_FLT_FN (BUILT_IN_EXPM1):
13983 CASE_FLT_FN (BUILT_IN_FLOOR):
13984 CASE_FLT_FN (BUILT_IN_FMOD):
13985 CASE_FLT_FN (BUILT_IN_FREXP):
13986 CASE_FLT_FN (BUILT_IN_LCEIL):
13987 CASE_FLT_FN (BUILT_IN_LDEXP):
13988 CASE_FLT_FN (BUILT_IN_LFLOOR):
13989 CASE_FLT_FN (BUILT_IN_LLCEIL):
13990 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13991 CASE_FLT_FN (BUILT_IN_LLRINT):
13992 CASE_FLT_FN (BUILT_IN_LLROUND):
13993 CASE_FLT_FN (BUILT_IN_LRINT):
13994 CASE_FLT_FN (BUILT_IN_LROUND):
13995 CASE_FLT_FN (BUILT_IN_MODF):
13996 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13997 CASE_FLT_FN (BUILT_IN_RINT):
13998 CASE_FLT_FN (BUILT_IN_ROUND):
13999 CASE_FLT_FN (BUILT_IN_SCALB):
14000 CASE_FLT_FN (BUILT_IN_SCALBLN):
14001 CASE_FLT_FN (BUILT_IN_SCALBN):
14002 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14003 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14004 CASE_FLT_FN (BUILT_IN_SINH):
14005 CASE_FLT_FN (BUILT_IN_TANH):
14006 CASE_FLT_FN (BUILT_IN_TRUNC):
14007 /* True if the 1st argument is nonnegative. */
14008 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14009 strict_overflow_p);
14010
14011 CASE_FLT_FN (BUILT_IN_FMAX):
14012 /* True if the 1st OR 2nd arguments are nonnegative. */
14013 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14014 strict_overflow_p)
14015 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14016 strict_overflow_p)));
14017
14018 CASE_FLT_FN (BUILT_IN_FMIN):
14019 /* True if the 1st AND 2nd arguments are nonnegative. */
14020 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14021 strict_overflow_p)
14022 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14023 strict_overflow_p)));
14024
14025 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14026 /* True if the 2nd argument is nonnegative. */
14027 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14028 strict_overflow_p);
14029
14030 CASE_FLT_FN (BUILT_IN_POWI):
14031 /* True if the 1st argument is nonnegative or the second
14032 argument is an even integer. */
14033 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
14034 {
14035 tree arg1 = CALL_EXPR_ARG (t, 1);
14036 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
14037 return true;
14038 }
14039 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14040 strict_overflow_p);
14041
14042 CASE_FLT_FN (BUILT_IN_POW):
14043 /* True if the 1st argument is nonnegative or the second
14044 argument is an even integer valued real. */
14045 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
14046 {
14047 REAL_VALUE_TYPE c;
14048 HOST_WIDE_INT n;
14049
14050 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
14051 n = real_to_integer (&c);
14052 if ((n & 1) == 0)
14053 {
14054 REAL_VALUE_TYPE cint;
14055 real_from_integer (&cint, VOIDmode, n,
14056 n < 0 ? -1 : 0, 0);
14057 if (real_identical (&c, &cint))
14058 return true;
14059 }
14060 }
14061 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14062 strict_overflow_p);
14063
14064 default:
14065 break;
14066 }
14067 }
14068
14069 /* ... fall through ... */
14070
14071 default:
14072 {
14073 tree type = TREE_TYPE (t);
14074 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14075 && truth_value_p (TREE_CODE (t)))
14076 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14077 have a signed:1 type (where the value is -1 and 0). */
14078 return true;
14079 }
14080 }
14081
14082 /* We don't know sign of `t', so be conservative and return false. */
14083 return false;
14084 }
14085
14086 /* Return true if `t' is known to be non-negative. Handle warnings
14087 about undefined signed overflow. */
14088
14089 bool
14090 tree_expr_nonnegative_p (tree t)
14091 {
14092 bool ret, strict_overflow_p;
14093
14094 strict_overflow_p = false;
14095 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14096 if (strict_overflow_p)
14097 fold_overflow_warning (("assuming signed overflow does not occur when "
14098 "determining that expression is always "
14099 "non-negative"),
14100 WARN_STRICT_OVERFLOW_MISC);
14101 return ret;
14102 }
14103
14104 /* Return true when T is an address and is known to be nonzero.
14105 For floating point we further ensure that T is not denormal.
14106 Similar logic is present in nonzero_address in rtlanal.h.
14107
14108 If the return value is based on the assumption that signed overflow
14109 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14110 change *STRICT_OVERFLOW_P. */
14111
14112 bool
14113 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14114 {
14115 tree type = TREE_TYPE (t);
14116 bool sub_strict_overflow_p;
14117
14118 /* Doing something useful for floating point would need more work. */
14119 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14120 return false;
14121
14122 switch (TREE_CODE (t))
14123 {
14124 case SSA_NAME:
14125 /* Query VRP to see if it has recorded any information about
14126 the range of this object. */
14127 return ssa_name_nonzero_p (t);
14128
14129 case ABS_EXPR:
14130 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14131 strict_overflow_p);
14132
14133 case INTEGER_CST:
14134 return !integer_zerop (t);
14135
14136 case POINTER_PLUS_EXPR:
14137 case PLUS_EXPR:
14138 if (TYPE_OVERFLOW_UNDEFINED (type))
14139 {
14140 /* With the presence of negative values it is hard
14141 to say something. */
14142 sub_strict_overflow_p = false;
14143 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14144 &sub_strict_overflow_p)
14145 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14146 &sub_strict_overflow_p))
14147 return false;
14148 /* One of operands must be positive and the other non-negative. */
14149 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14150 overflows, on a twos-complement machine the sum of two
14151 nonnegative numbers can never be zero. */
14152 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14153 strict_overflow_p)
14154 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14155 strict_overflow_p));
14156 }
14157 break;
14158
14159 case MULT_EXPR:
14160 if (TYPE_OVERFLOW_UNDEFINED (type))
14161 {
14162 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14163 strict_overflow_p)
14164 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14165 strict_overflow_p))
14166 {
14167 *strict_overflow_p = true;
14168 return true;
14169 }
14170 }
14171 break;
14172
14173 case NOP_EXPR:
14174 {
14175 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
14176 tree outer_type = TREE_TYPE (t);
14177
14178 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14179 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14180 strict_overflow_p));
14181 }
14182 break;
14183
14184 case ADDR_EXPR:
14185 {
14186 tree base = get_base_address (TREE_OPERAND (t, 0));
14187
14188 if (!base)
14189 return false;
14190
14191 /* Weak declarations may link to NULL. */
14192 if (VAR_OR_FUNCTION_DECL_P (base))
14193 return !DECL_WEAK (base);
14194
14195 /* Constants are never weak. */
14196 if (CONSTANT_CLASS_P (base))
14197 return true;
14198
14199 return false;
14200 }
14201
14202 case COND_EXPR:
14203 sub_strict_overflow_p = false;
14204 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14205 &sub_strict_overflow_p)
14206 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14207 &sub_strict_overflow_p))
14208 {
14209 if (sub_strict_overflow_p)
14210 *strict_overflow_p = true;
14211 return true;
14212 }
14213 break;
14214
14215 case MIN_EXPR:
14216 sub_strict_overflow_p = false;
14217 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14218 &sub_strict_overflow_p)
14219 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14220 &sub_strict_overflow_p))
14221 {
14222 if (sub_strict_overflow_p)
14223 *strict_overflow_p = true;
14224 }
14225 break;
14226
14227 case MAX_EXPR:
14228 sub_strict_overflow_p = false;
14229 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14230 &sub_strict_overflow_p))
14231 {
14232 if (sub_strict_overflow_p)
14233 *strict_overflow_p = true;
14234
14235 /* When both operands are nonzero, then MAX must be too. */
14236 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14237 strict_overflow_p))
14238 return true;
14239
14240 /* MAX where operand 0 is positive is positive. */
14241 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14242 strict_overflow_p);
14243 }
14244 /* MAX where operand 1 is positive is positive. */
14245 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14246 &sub_strict_overflow_p)
14247 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14248 &sub_strict_overflow_p))
14249 {
14250 if (sub_strict_overflow_p)
14251 *strict_overflow_p = true;
14252 return true;
14253 }
14254 break;
14255
14256 case COMPOUND_EXPR:
14257 case MODIFY_EXPR:
14258 case GIMPLE_MODIFY_STMT:
14259 case BIND_EXPR:
14260 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14261 strict_overflow_p);
14262
14263 case SAVE_EXPR:
14264 case NON_LVALUE_EXPR:
14265 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14266 strict_overflow_p);
14267
14268 case BIT_IOR_EXPR:
14269 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14270 strict_overflow_p)
14271 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14272 strict_overflow_p));
14273
14274 case CALL_EXPR:
14275 return alloca_call_p (t);
14276
14277 default:
14278 break;
14279 }
14280 return false;
14281 }
14282
14283 /* Return true when T is an address and is known to be nonzero.
14284 Handle warnings about undefined signed overflow. */
14285
14286 bool
14287 tree_expr_nonzero_p (tree t)
14288 {
14289 bool ret, strict_overflow_p;
14290
14291 strict_overflow_p = false;
14292 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14293 if (strict_overflow_p)
14294 fold_overflow_warning (("assuming signed overflow does not occur when "
14295 "determining that expression is always "
14296 "non-zero"),
14297 WARN_STRICT_OVERFLOW_MISC);
14298 return ret;
14299 }
14300
14301 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14302 attempt to fold the expression to a constant without modifying TYPE,
14303 OP0 or OP1.
14304
14305 If the expression could be simplified to a constant, then return
14306 the constant. If the expression would not be simplified to a
14307 constant, then return NULL_TREE. */
14308
14309 tree
14310 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14311 {
14312 tree tem = fold_binary (code, type, op0, op1);
14313 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14314 }
14315
14316 /* Given the components of a unary expression CODE, TYPE and OP0,
14317 attempt to fold the expression to a constant without modifying
14318 TYPE or OP0.
14319
14320 If the expression could be simplified to a constant, then return
14321 the constant. If the expression would not be simplified to a
14322 constant, then return NULL_TREE. */
14323
14324 tree
14325 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14326 {
14327 tree tem = fold_unary (code, type, op0);
14328 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14329 }
14330
14331 /* If EXP represents referencing an element in a constant string
14332 (either via pointer arithmetic or array indexing), return the
14333 tree representing the value accessed, otherwise return NULL. */
14334
14335 tree
14336 fold_read_from_constant_string (tree exp)
14337 {
14338 if ((TREE_CODE (exp) == INDIRECT_REF
14339 || TREE_CODE (exp) == ARRAY_REF)
14340 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14341 {
14342 tree exp1 = TREE_OPERAND (exp, 0);
14343 tree index;
14344 tree string;
14345
14346 if (TREE_CODE (exp) == INDIRECT_REF)
14347 string = string_constant (exp1, &index);
14348 else
14349 {
14350 tree low_bound = array_ref_low_bound (exp);
14351 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14352
14353 /* Optimize the special-case of a zero lower bound.
14354
14355 We convert the low_bound to sizetype to avoid some problems
14356 with constant folding. (E.g. suppose the lower bound is 1,
14357 and its mode is QI. Without the conversion,l (ARRAY
14358 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14359 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
14360 if (! integer_zerop (low_bound))
14361 index = size_diffop (index, fold_convert (sizetype, low_bound));
14362
14363 string = exp1;
14364 }
14365
14366 if (string
14367 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14368 && TREE_CODE (string) == STRING_CST
14369 && TREE_CODE (index) == INTEGER_CST
14370 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14371 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14372 == MODE_INT)
14373 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14374 return build_int_cst_type (TREE_TYPE (exp),
14375 (TREE_STRING_POINTER (string)
14376 [TREE_INT_CST_LOW (index)]));
14377 }
14378 return NULL;
14379 }
14380
14381 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14382 an integer constant, real, or fixed-point constant.
14383
14384 TYPE is the type of the result. */
14385
14386 static tree
14387 fold_negate_const (tree arg0, tree type)
14388 {
14389 tree t = NULL_TREE;
14390
14391 switch (TREE_CODE (arg0))
14392 {
14393 case INTEGER_CST:
14394 {
14395 unsigned HOST_WIDE_INT low;
14396 HOST_WIDE_INT high;
14397 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14398 TREE_INT_CST_HIGH (arg0),
14399 &low, &high);
14400 t = force_fit_type_double (type, low, high, 1,
14401 (overflow | TREE_OVERFLOW (arg0))
14402 && !TYPE_UNSIGNED (type));
14403 break;
14404 }
14405
14406 case REAL_CST:
14407 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14408 break;
14409
14410 case FIXED_CST:
14411 {
14412 FIXED_VALUE_TYPE f;
14413 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14414 &(TREE_FIXED_CST (arg0)), NULL,
14415 TYPE_SATURATING (type));
14416 t = build_fixed (type, f);
14417 /* Propagate overflow flags. */
14418 if (overflow_p | TREE_OVERFLOW (arg0))
14419 {
14420 TREE_OVERFLOW (t) = 1;
14421 TREE_CONSTANT_OVERFLOW (t) = 1;
14422 }
14423 else if (TREE_CONSTANT_OVERFLOW (arg0))
14424 TREE_CONSTANT_OVERFLOW (t) = 1;
14425 break;
14426 }
14427
14428 default:
14429 gcc_unreachable ();
14430 }
14431
14432 return t;
14433 }
14434
14435 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14436 an integer constant or real constant.
14437
14438 TYPE is the type of the result. */
14439
14440 tree
14441 fold_abs_const (tree arg0, tree type)
14442 {
14443 tree t = NULL_TREE;
14444
14445 switch (TREE_CODE (arg0))
14446 {
14447 case INTEGER_CST:
14448 /* If the value is unsigned, then the absolute value is
14449 the same as the ordinary value. */
14450 if (TYPE_UNSIGNED (type))
14451 t = arg0;
14452 /* Similarly, if the value is non-negative. */
14453 else if (INT_CST_LT (integer_minus_one_node, arg0))
14454 t = arg0;
14455 /* If the value is negative, then the absolute value is
14456 its negation. */
14457 else
14458 {
14459 unsigned HOST_WIDE_INT low;
14460 HOST_WIDE_INT high;
14461 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14462 TREE_INT_CST_HIGH (arg0),
14463 &low, &high);
14464 t = force_fit_type_double (type, low, high, -1,
14465 overflow | TREE_OVERFLOW (arg0));
14466 }
14467 break;
14468
14469 case REAL_CST:
14470 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14471 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14472 else
14473 t = arg0;
14474 break;
14475
14476 default:
14477 gcc_unreachable ();
14478 }
14479
14480 return t;
14481 }
14482
14483 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14484 constant. TYPE is the type of the result. */
14485
14486 static tree
14487 fold_not_const (tree arg0, tree type)
14488 {
14489 tree t = NULL_TREE;
14490
14491 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14492
14493 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14494 ~TREE_INT_CST_HIGH (arg0), 0,
14495 TREE_OVERFLOW (arg0));
14496
14497 return t;
14498 }
14499
14500 /* Given CODE, a relational operator, the target type, TYPE and two
14501 constant operands OP0 and OP1, return the result of the
14502 relational operation. If the result is not a compile time
14503 constant, then return NULL_TREE. */
14504
14505 static tree
14506 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14507 {
14508 int result, invert;
14509
14510 /* From here on, the only cases we handle are when the result is
14511 known to be a constant. */
14512
14513 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14514 {
14515 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14516 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14517
14518 /* Handle the cases where either operand is a NaN. */
14519 if (real_isnan (c0) || real_isnan (c1))
14520 {
14521 switch (code)
14522 {
14523 case EQ_EXPR:
14524 case ORDERED_EXPR:
14525 result = 0;
14526 break;
14527
14528 case NE_EXPR:
14529 case UNORDERED_EXPR:
14530 case UNLT_EXPR:
14531 case UNLE_EXPR:
14532 case UNGT_EXPR:
14533 case UNGE_EXPR:
14534 case UNEQ_EXPR:
14535 result = 1;
14536 break;
14537
14538 case LT_EXPR:
14539 case LE_EXPR:
14540 case GT_EXPR:
14541 case GE_EXPR:
14542 case LTGT_EXPR:
14543 if (flag_trapping_math)
14544 return NULL_TREE;
14545 result = 0;
14546 break;
14547
14548 default:
14549 gcc_unreachable ();
14550 }
14551
14552 return constant_boolean_node (result, type);
14553 }
14554
14555 return constant_boolean_node (real_compare (code, c0, c1), type);
14556 }
14557
14558 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14559 {
14560 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14561 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14562 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14563 }
14564
14565 /* Handle equality/inequality of complex constants. */
14566 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14567 {
14568 tree rcond = fold_relational_const (code, type,
14569 TREE_REALPART (op0),
14570 TREE_REALPART (op1));
14571 tree icond = fold_relational_const (code, type,
14572 TREE_IMAGPART (op0),
14573 TREE_IMAGPART (op1));
14574 if (code == EQ_EXPR)
14575 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14576 else if (code == NE_EXPR)
14577 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14578 else
14579 return NULL_TREE;
14580 }
14581
14582 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14583
14584 To compute GT, swap the arguments and do LT.
14585 To compute GE, do LT and invert the result.
14586 To compute LE, swap the arguments, do LT and invert the result.
14587 To compute NE, do EQ and invert the result.
14588
14589 Therefore, the code below must handle only EQ and LT. */
14590
14591 if (code == LE_EXPR || code == GT_EXPR)
14592 {
14593 tree tem = op0;
14594 op0 = op1;
14595 op1 = tem;
14596 code = swap_tree_comparison (code);
14597 }
14598
14599 /* Note that it is safe to invert for real values here because we
14600 have already handled the one case that it matters. */
14601
14602 invert = 0;
14603 if (code == NE_EXPR || code == GE_EXPR)
14604 {
14605 invert = 1;
14606 code = invert_tree_comparison (code, false);
14607 }
14608
14609 /* Compute a result for LT or EQ if args permit;
14610 Otherwise return T. */
14611 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14612 {
14613 if (code == EQ_EXPR)
14614 result = tree_int_cst_equal (op0, op1);
14615 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14616 result = INT_CST_LT_UNSIGNED (op0, op1);
14617 else
14618 result = INT_CST_LT (op0, op1);
14619 }
14620 else
14621 return NULL_TREE;
14622
14623 if (invert)
14624 result ^= 1;
14625 return constant_boolean_node (result, type);
14626 }
14627
14628 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14629 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14630 itself. */
14631
14632 tree
14633 fold_build_cleanup_point_expr (tree type, tree expr)
14634 {
14635 /* If the expression does not have side effects then we don't have to wrap
14636 it with a cleanup point expression. */
14637 if (!TREE_SIDE_EFFECTS (expr))
14638 return expr;
14639
14640 /* If the expression is a return, check to see if the expression inside the
14641 return has no side effects or the right hand side of the modify expression
14642 inside the return. If either don't have side effects set we don't need to
14643 wrap the expression in a cleanup point expression. Note we don't check the
14644 left hand side of the modify because it should always be a return decl. */
14645 if (TREE_CODE (expr) == RETURN_EXPR)
14646 {
14647 tree op = TREE_OPERAND (expr, 0);
14648 if (!op || !TREE_SIDE_EFFECTS (op))
14649 return expr;
14650 op = TREE_OPERAND (op, 1);
14651 if (!TREE_SIDE_EFFECTS (op))
14652 return expr;
14653 }
14654
14655 return build1 (CLEANUP_POINT_EXPR, type, expr);
14656 }
14657
14658 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14659 of an indirection through OP0, or NULL_TREE if no simplification is
14660 possible. */
14661
14662 tree
14663 fold_indirect_ref_1 (tree type, tree op0)
14664 {
14665 tree sub = op0;
14666 tree subtype;
14667
14668 STRIP_NOPS (sub);
14669 subtype = TREE_TYPE (sub);
14670 if (!POINTER_TYPE_P (subtype))
14671 return NULL_TREE;
14672
14673 if (TREE_CODE (sub) == ADDR_EXPR)
14674 {
14675 tree op = TREE_OPERAND (sub, 0);
14676 tree optype = TREE_TYPE (op);
14677 /* *&CONST_DECL -> to the value of the const decl. */
14678 if (TREE_CODE (op) == CONST_DECL)
14679 return DECL_INITIAL (op);
14680 /* *&p => p; make sure to handle *&"str"[cst] here. */
14681 if (type == optype)
14682 {
14683 tree fop = fold_read_from_constant_string (op);
14684 if (fop)
14685 return fop;
14686 else
14687 return op;
14688 }
14689 /* *(foo *)&fooarray => fooarray[0] */
14690 else if (TREE_CODE (optype) == ARRAY_TYPE
14691 && type == TREE_TYPE (optype))
14692 {
14693 tree type_domain = TYPE_DOMAIN (optype);
14694 tree min_val = size_zero_node;
14695 if (type_domain && TYPE_MIN_VALUE (type_domain))
14696 min_val = TYPE_MIN_VALUE (type_domain);
14697 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14698 }
14699 /* *(foo *)&complexfoo => __real__ complexfoo */
14700 else if (TREE_CODE (optype) == COMPLEX_TYPE
14701 && type == TREE_TYPE (optype))
14702 return fold_build1 (REALPART_EXPR, type, op);
14703 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14704 else if (TREE_CODE (optype) == VECTOR_TYPE
14705 && type == TREE_TYPE (optype))
14706 {
14707 tree part_width = TYPE_SIZE (type);
14708 tree index = bitsize_int (0);
14709 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14710 }
14711 }
14712
14713 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14714 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14715 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14716 {
14717 tree op00 = TREE_OPERAND (sub, 0);
14718 tree op01 = TREE_OPERAND (sub, 1);
14719 tree op00type;
14720
14721 STRIP_NOPS (op00);
14722 op00type = TREE_TYPE (op00);
14723 if (TREE_CODE (op00) == ADDR_EXPR
14724 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14725 && type == TREE_TYPE (TREE_TYPE (op00type)))
14726 {
14727 tree size = TYPE_SIZE_UNIT (type);
14728 if (tree_int_cst_equal (size, op01))
14729 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14730 }
14731 }
14732
14733 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14734 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14735 && type == TREE_TYPE (TREE_TYPE (subtype)))
14736 {
14737 tree type_domain;
14738 tree min_val = size_zero_node;
14739 sub = build_fold_indirect_ref (sub);
14740 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14741 if (type_domain && TYPE_MIN_VALUE (type_domain))
14742 min_val = TYPE_MIN_VALUE (type_domain);
14743 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14744 }
14745
14746 return NULL_TREE;
14747 }
14748
14749 /* Builds an expression for an indirection through T, simplifying some
14750 cases. */
14751
14752 tree
14753 build_fold_indirect_ref (tree t)
14754 {
14755 tree type = TREE_TYPE (TREE_TYPE (t));
14756 tree sub = fold_indirect_ref_1 (type, t);
14757
14758 if (sub)
14759 return sub;
14760 else
14761 return build1 (INDIRECT_REF, type, t);
14762 }
14763
14764 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14765
14766 tree
14767 fold_indirect_ref (tree t)
14768 {
14769 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
14770
14771 if (sub)
14772 return sub;
14773 else
14774 return t;
14775 }
14776
14777 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14778 whose result is ignored. The type of the returned tree need not be
14779 the same as the original expression. */
14780
14781 tree
14782 fold_ignored_result (tree t)
14783 {
14784 if (!TREE_SIDE_EFFECTS (t))
14785 return integer_zero_node;
14786
14787 for (;;)
14788 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14789 {
14790 case tcc_unary:
14791 t = TREE_OPERAND (t, 0);
14792 break;
14793
14794 case tcc_binary:
14795 case tcc_comparison:
14796 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14797 t = TREE_OPERAND (t, 0);
14798 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14799 t = TREE_OPERAND (t, 1);
14800 else
14801 return t;
14802 break;
14803
14804 case tcc_expression:
14805 switch (TREE_CODE (t))
14806 {
14807 case COMPOUND_EXPR:
14808 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14809 return t;
14810 t = TREE_OPERAND (t, 0);
14811 break;
14812
14813 case COND_EXPR:
14814 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14815 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14816 return t;
14817 t = TREE_OPERAND (t, 0);
14818 break;
14819
14820 default:
14821 return t;
14822 }
14823 break;
14824
14825 default:
14826 return t;
14827 }
14828 }
14829
14830 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
14831 This can only be applied to objects of a sizetype. */
14832
14833 tree
14834 round_up (tree value, int divisor)
14835 {
14836 tree div = NULL_TREE;
14837
14838 gcc_assert (divisor > 0);
14839 if (divisor == 1)
14840 return value;
14841
14842 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14843 have to do anything. Only do this when we are not given a const,
14844 because in that case, this check is more expensive than just
14845 doing it. */
14846 if (TREE_CODE (value) != INTEGER_CST)
14847 {
14848 div = build_int_cst (TREE_TYPE (value), divisor);
14849
14850 if (multiple_of_p (TREE_TYPE (value), value, div))
14851 return value;
14852 }
14853
14854 /* If divisor is a power of two, simplify this to bit manipulation. */
14855 if (divisor == (divisor & -divisor))
14856 {
14857 if (TREE_CODE (value) == INTEGER_CST)
14858 {
14859 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
14860 unsigned HOST_WIDE_INT high;
14861 bool overflow_p;
14862
14863 if ((low & (divisor - 1)) == 0)
14864 return value;
14865
14866 overflow_p = TREE_OVERFLOW (value);
14867 high = TREE_INT_CST_HIGH (value);
14868 low &= ~(divisor - 1);
14869 low += divisor;
14870 if (low == 0)
14871 {
14872 high++;
14873 if (high == 0)
14874 overflow_p = true;
14875 }
14876
14877 return force_fit_type_double (TREE_TYPE (value), low, high,
14878 -1, overflow_p);
14879 }
14880 else
14881 {
14882 tree t;
14883
14884 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14885 value = size_binop (PLUS_EXPR, value, t);
14886 t = build_int_cst (TREE_TYPE (value), -divisor);
14887 value = size_binop (BIT_AND_EXPR, value, t);
14888 }
14889 }
14890 else
14891 {
14892 if (!div)
14893 div = build_int_cst (TREE_TYPE (value), divisor);
14894 value = size_binop (CEIL_DIV_EXPR, value, div);
14895 value = size_binop (MULT_EXPR, value, div);
14896 }
14897
14898 return value;
14899 }
14900
14901 /* Likewise, but round down. */
14902
14903 tree
14904 round_down (tree value, int divisor)
14905 {
14906 tree div = NULL_TREE;
14907
14908 gcc_assert (divisor > 0);
14909 if (divisor == 1)
14910 return value;
14911
14912 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14913 have to do anything. Only do this when we are not given a const,
14914 because in that case, this check is more expensive than just
14915 doing it. */
14916 if (TREE_CODE (value) != INTEGER_CST)
14917 {
14918 div = build_int_cst (TREE_TYPE (value), divisor);
14919
14920 if (multiple_of_p (TREE_TYPE (value), value, div))
14921 return value;
14922 }
14923
14924 /* If divisor is a power of two, simplify this to bit manipulation. */
14925 if (divisor == (divisor & -divisor))
14926 {
14927 tree t;
14928
14929 t = build_int_cst (TREE_TYPE (value), -divisor);
14930 value = size_binop (BIT_AND_EXPR, value, t);
14931 }
14932 else
14933 {
14934 if (!div)
14935 div = build_int_cst (TREE_TYPE (value), divisor);
14936 value = size_binop (FLOOR_DIV_EXPR, value, div);
14937 value = size_binop (MULT_EXPR, value, div);
14938 }
14939
14940 return value;
14941 }
14942
14943 /* Returns the pointer to the base of the object addressed by EXP and
14944 extracts the information about the offset of the access, storing it
14945 to PBITPOS and POFFSET. */
14946
14947 static tree
14948 split_address_to_core_and_offset (tree exp,
14949 HOST_WIDE_INT *pbitpos, tree *poffset)
14950 {
14951 tree core;
14952 enum machine_mode mode;
14953 int unsignedp, volatilep;
14954 HOST_WIDE_INT bitsize;
14955
14956 if (TREE_CODE (exp) == ADDR_EXPR)
14957 {
14958 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14959 poffset, &mode, &unsignedp, &volatilep,
14960 false);
14961 core = fold_addr_expr (core);
14962 }
14963 else
14964 {
14965 core = exp;
14966 *pbitpos = 0;
14967 *poffset = NULL_TREE;
14968 }
14969
14970 return core;
14971 }
14972
14973 /* Returns true if addresses of E1 and E2 differ by a constant, false
14974 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14975
14976 bool
14977 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14978 {
14979 tree core1, core2;
14980 HOST_WIDE_INT bitpos1, bitpos2;
14981 tree toffset1, toffset2, tdiff, type;
14982
14983 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14984 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14985
14986 if (bitpos1 % BITS_PER_UNIT != 0
14987 || bitpos2 % BITS_PER_UNIT != 0
14988 || !operand_equal_p (core1, core2, 0))
14989 return false;
14990
14991 if (toffset1 && toffset2)
14992 {
14993 type = TREE_TYPE (toffset1);
14994 if (type != TREE_TYPE (toffset2))
14995 toffset2 = fold_convert (type, toffset2);
14996
14997 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14998 if (!cst_and_fits_in_hwi (tdiff))
14999 return false;
15000
15001 *diff = int_cst_value (tdiff);
15002 }
15003 else if (toffset1 || toffset2)
15004 {
15005 /* If only one of the offsets is non-constant, the difference cannot
15006 be a constant. */
15007 return false;
15008 }
15009 else
15010 *diff = 0;
15011
15012 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15013 return true;
15014 }
15015
15016 /* Simplify the floating point expression EXP when the sign of the
15017 result is not significant. Return NULL_TREE if no simplification
15018 is possible. */
15019
15020 tree
15021 fold_strip_sign_ops (tree exp)
15022 {
15023 tree arg0, arg1;
15024
15025 switch (TREE_CODE (exp))
15026 {
15027 case ABS_EXPR:
15028 case NEGATE_EXPR:
15029 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15030 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15031
15032 case MULT_EXPR:
15033 case RDIV_EXPR:
15034 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15035 return NULL_TREE;
15036 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15037 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15038 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15039 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15040 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15041 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15042 break;
15043
15044 case COMPOUND_EXPR:
15045 arg0 = TREE_OPERAND (exp, 0);
15046 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15047 if (arg1)
15048 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15049 break;
15050
15051 case COND_EXPR:
15052 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15053 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15054 if (arg0 || arg1)
15055 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15056 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15057 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15058 break;
15059
15060 case CALL_EXPR:
15061 {
15062 const enum built_in_function fcode = builtin_mathfn_code (exp);
15063 switch (fcode)
15064 {
15065 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15066 /* Strip copysign function call, return the 1st argument. */
15067 arg0 = CALL_EXPR_ARG (exp, 0);
15068 arg1 = CALL_EXPR_ARG (exp, 1);
15069 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15070
15071 default:
15072 /* Strip sign ops from the argument of "odd" math functions. */
15073 if (negate_mathfn_p (fcode))
15074 {
15075 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15076 if (arg0)
15077 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
15078 }
15079 break;
15080 }
15081 }
15082 break;
15083
15084 default:
15085 break;
15086 }
15087 return NULL_TREE;
15088 }