/* gcc/fold-const.c -- mirrored from thirdparty/gcc.git (git.ipfire.org).
   Related change: expr.c (store_constructor): SIZE now signed.  */
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30
31 /* The entry points in this file are fold, size_int_wide, size_binop
32 and force_fit_type.
33
34 fold takes a tree as argument and returns a simplified tree.
35
36 size_binop takes a tree code for an arithmetic operation
37 and two operands that are trees, and produces a tree for the
38 result, assuming the type comes from `sizetype'.
39
40 size_int takes an integer value, and creates a tree constant
41 with type from `sizetype'.
42
43 force_fit_type takes a constant and prior overflow indicator, and
44 forces the value to fit the type. It returns an overflow indicator. */
45
46 #include "config.h"
47 #include "system.h"
48 #include <setjmp.h>
49 #include "flags.h"
50 #include "tree.h"
51 #include "rtl.h"
52 #include "tm_p.h"
53 #include "toplev.h"
54 #include "ggc.h"
55
56 static void encode PARAMS ((HOST_WIDE_INT *,
57 unsigned HOST_WIDE_INT,
58 HOST_WIDE_INT));
59 static void decode PARAMS ((HOST_WIDE_INT *,
60 unsigned HOST_WIDE_INT *,
61 HOST_WIDE_INT *));
62 static tree negate_expr PARAMS ((tree));
63 static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
64 int));
65 static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
66 static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int, int));
67 static void const_binop_1 PARAMS ((PTR));
68 static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
69 static void fold_convert_1 PARAMS ((PTR));
70 static tree fold_convert PARAMS ((tree, tree));
71 static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
72 static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
73 static int truth_value_p PARAMS ((enum tree_code));
74 static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
75 static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
76 static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
77 static tree omit_one_operand PARAMS ((tree, tree, tree));
78 static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
79 static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
80 static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
81 static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
82 tree, tree));
83 static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
84 HOST_WIDE_INT *,
85 enum machine_mode *, int *,
86 int *, tree *, tree *));
87 static int all_ones_mask_p PARAMS ((tree, int));
88 static int simple_operand_p PARAMS ((tree));
89 static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
90 tree, int));
91 static tree make_range PARAMS ((tree, int *, tree *, tree *));
92 static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
93 static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
94 int, tree, tree));
95 static tree fold_range_test PARAMS ((tree));
96 static tree unextend PARAMS ((tree, int, int, tree));
97 static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
98 static tree optimize_minmax_comparison PARAMS ((tree));
99 static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
100 static tree strip_compound_expr PARAMS ((tree, tree));
101 static int multiple_of_p PARAMS ((tree, tree, tree));
102 static tree constant_boolean_node PARAMS ((int, tree));
103 static int count_cond PARAMS ((tree, int));
104
105 #ifndef BRANCH_COST
106 #define BRANCH_COST 1
107 #endif
108
109 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
110 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
111 and SUM1. Then this yields nonzero if overflow occurred during the
112 addition.
113
114 Overflow occurs if A and B have the same sign, but A and SUM differ in
115 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
116 sign. */
117 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
118 \f
119 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
120 We do that by representing the two-word integer in 4 words, with only
121 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
122 number. The value of the word is LOWPART + HIGHPART * BASE. */
123
124 #define LOWPART(x) \
125 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
126 #define HIGHPART(x) \
127 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
128 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
129
130 /* Unpack a two-word integer into 4 words.
131 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
132 WORDS points to the array of HOST_WIDE_INTs. */
133
134 static void
135 encode (words, low, hi)
136 HOST_WIDE_INT *words;
137 unsigned HOST_WIDE_INT low;
138 HOST_WIDE_INT hi;
139 {
140 words[0] = LOWPART (low);
141 words[1] = HIGHPART (low);
142 words[2] = LOWPART (hi);
143 words[3] = HIGHPART (hi);
144 }
145
146 /* Pack an array of 4 words into a two-word integer.
147 WORDS points to the array of words.
148 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
149
150 static void
151 decode (words, low, hi)
152 HOST_WIDE_INT *words;
153 unsigned HOST_WIDE_INT *low;
154 HOST_WIDE_INT *hi;
155 {
156 *low = words[0] + words[1] * BASE;
157 *hi = words[2] + words[3] * BASE;
158 }
159 \f
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.  T is modified
   in place: bits beyond the type's precision are first cleared, then, for
   signed types whose (truncated) value has the sign bit set, the sign is
   extended back through the excess bits.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.

   Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
   if it exists.  */

int
force_fit_type (t, overflow)
     tree t;
     int overflow;
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
#ifdef CHECK_FLOAT_VALUE
      CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
			 overflow);
#endif
      return overflow;
    }

  /* Anything that is neither REAL_CST nor INTEGER_CST is left alone.  */
  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  /* Remember the original value so we can tell at the end whether the
     masking below changed anything.  */
  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  /* Pointer constants are masked to the width of a pointer, not to the
     TYPE_PRECISION of the pointer type.  */
  if (POINTER_TYPE_P (TREE_TYPE (t)))
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;				/* Full doubleword width: nothing to clear.  */
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow.  */
  if (TREE_UNSIGNED (TREE_TYPE (t)))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
	  ? 0 != (TREE_INT_CST_HIGH (t)
		  & ((HOST_WIDE_INT) 1
		     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	  : 0 != (TREE_INT_CST_LOW (t)
		  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
	 set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_HIGH (t)
	  |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
	{
	  TREE_INT_CST_HIGH (t) = -1;
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
	}
    }

  /* Return nonzero if signed overflow occurred: either the caller
     flagged it, or the masking/sign-extension above changed the
     stored value.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
244 \f
245 /* Add two doubleword integers with doubleword result.
246 Each argument is given as two `HOST_WIDE_INT' pieces.
247 One argument is L1 and H1; the other, L2 and H2.
248 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
249
250 int
251 add_double (l1, h1, l2, h2, lv, hv)
252 unsigned HOST_WIDE_INT l1, l2;
253 HOST_WIDE_INT h1, h2;
254 unsigned HOST_WIDE_INT *lv;
255 HOST_WIDE_INT *hv;
256 {
257 unsigned HOST_WIDE_INT l;
258 HOST_WIDE_INT h;
259
260 l = l1 + l2;
261 h = h1 + h2 + (l < l1);
262
263 *lv = l;
264 *hv = h;
265 return OVERFLOW_SUM_SIGN (h1, h2, h);
266 }
267
268 /* Negate a doubleword integer with doubleword result.
269 Return nonzero if the operation overflows, assuming it's signed.
270 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
271 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
272
273 int
274 neg_double (l1, h1, lv, hv)
275 unsigned HOST_WIDE_INT l1;
276 HOST_WIDE_INT h1;
277 unsigned HOST_WIDE_INT *lv;
278 HOST_WIDE_INT *hv;
279 {
280 if (l1 == 0)
281 {
282 *lv = 0;
283 *hv = - h1;
284 return (*hv & h1) < 0;
285 }
286 else
287 {
288 *lv = - l1;
289 *hv = ~ h1;
290 return 0;
291 }
292 }
293 \f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.

   The work is done on 4 half-word "digits" per operand (see encode and
   decode above), producing an 8-digit product; the low 4 digits are the
   result and the high 4 are used only for the overflow check.  */

int
mul_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  register unsigned HOST_WIDE_INT carry;
  register int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  bzero ((char *) prod, sizeof prod);

  /* Classic schoolbook multiplication on the half-word digits; a digit
     product plus carry always fits in one HOST_WIDE_INT.  */
  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  The digits above were
     multiplied as unsigned, so compensate the top half for each negative
     operand by adding in the negation of the other operand.  */
  decode (prod+4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  /* No overflow iff every bit of the top half is a copy of the result's
     sign bit.  */
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
353 \f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  /* A negative count means shift the other way.  */
  if (count < 0)
    {
      rshift_double (l1, h1, - count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      /* The entire low word (and possibly more) moves into the high
	 word; the low word becomes zero.  */
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      /* The two-step shift of L1 avoids an undefined shift by exactly
	 HOST_BITS_PER_WIDE_INT when COUNT is zero.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }
}
399
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec ATTRIBUTE_UNUSED;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  /* All-ones when doing an arithmetic shift of a negative value,
     otherwise zero; these are the bits shifted in at the top.  */
  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = signmask;
      *lv = signmask;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      /* The two-step shift of SIGNMASK avoids an undefined shift by
	 exactly HOST_BITS_PER_WIDE_INT when COUNT equals it.  */
      *hv = signmask;
      *lv = ((signmask << (2 * HOST_BITS_PER_WIDE_INT - count - 1) << 1)
	     | ((unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT)));
    }
  else
    {
      /* Likewise, the two-step shift of H1 avoids an undefined shift by
	 HOST_BITS_PER_WIDE_INT when COUNT is zero.  */
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
      *hv = ((signmask << (HOST_BITS_PER_WIDE_INT - count))
	     | ((unsigned HOST_WIDE_INT) h1 >> count));
    }
}
446 \f
447 /* Rotate the doubleword integer in L1, H1 left by COUNT places
448 keeping only PREC bits of result.
449 Rotate right if COUNT is negative.
450 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
451
452 void
453 lrotate_double (l1, h1, count, prec, lv, hv)
454 unsigned HOST_WIDE_INT l1;
455 HOST_WIDE_INT h1, count;
456 unsigned int prec;
457 unsigned HOST_WIDE_INT *lv;
458 HOST_WIDE_INT *hv;
459 {
460 unsigned HOST_WIDE_INT s1l, s2l;
461 HOST_WIDE_INT s1h, s2h;
462
463 count %= prec;
464 if (count < 0)
465 count += prec;
466
467 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
468 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
469 *lv = s1l | s2l;
470 *hv = s1h | s2h;
471 }
472
473 /* Rotate the doubleword integer in L1, H1 left by COUNT places
474 keeping only PREC bits of result. COUNT must be positive.
475 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
476
477 void
478 rrotate_double (l1, h1, count, prec, lv, hv)
479 unsigned HOST_WIDE_INT l1;
480 HOST_WIDE_INT h1, count;
481 unsigned int prec;
482 unsigned HOST_WIDE_INT *lv;
483 HOST_WIDE_INT *hv;
484 {
485 unsigned HOST_WIDE_INT s1l, s2l;
486 HOST_WIDE_INT s1h, s2h;
487
488 count %= prec;
489 if (count < 0)
490 count += prec;
491
492 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
493 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
494 *lv = s1l | s2l;
495 *hv = s1h | s2h;
496 }
497 \f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to a integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.

   The general case is Knuth's Algorithm D operating on half-word
   digits; single-precision and small-divisor cases are special-cased
   first.  */

int
div_and_round_double (code, uns,
		      lnum_orig, hnum_orig, lden_orig, hden_orig,
		      lquo, hquo, lrem, hrem)
     enum tree_code code;
     int uns;
     unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
     HOST_WIDE_INT hnum_orig;
     unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
     HOST_WIDE_INT hden_orig;
     unsigned HOST_WIDE_INT *lquo, *lrem;
     HOST_WIDE_INT *hquo, *hrem;
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  register int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  /* Division by zero: flag overflow and divide by 1 instead so a value
     is still produced.  */
  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  bzero ((char *) quo, sizeof quo);

  bzero ((char *) num, sizeof num);	/* to zero 9th element */
  bzero ((char *) den, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  Ordinary short division, digit by
	 digit from the most significant end.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest non-zero divisor digit.  */
      for (i = 4 - 1; ; i--)
	if (den[i] != 0) {
	  den_hi_sig = i;
	  break;
	}

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* if result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  /* Adjust the truncated quotient for the requested rounding mode.  */
  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1; */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
777 \f
778 #ifndef REAL_ARITHMETIC
/* Effectively truncate a real value to represent the nearest possible value
   in a narrower mode.  The result is actually represented in the same data
   type as the argument, but its value is usually different.

   A trap may occur during the FP operations and it is the responsibility
   of the calling function to have a handler established.  */

REAL_VALUE_TYPE
real_value_truncate (mode, arg)
     enum machine_mode mode;
     REAL_VALUE_TYPE arg;
{
  /* Simply delegates to the target's REAL_VALUE_TRUNCATE macro.  */
  return REAL_VALUE_TRUNCATE (mode, arg);
}
793
794 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
795
/* Check for infinity in an IEEE double precision number.
   X is infinite iff its exponent field is all ones (2047) and both
   mantissa fields are zero.  */

int
target_isinf (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  /* Detect the host's layout at run time: store -1.0 and see whether
     its sign bit is visible through the big-endian view.  */
  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
	      && u.big_endian.mantissa1 == 0
	      && u.big_endian.mantissa2 == 0);
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
	      && u.little_endian.mantissa1 == 0
	      && u.little_endian.mantissa2 == 0);
    }
}
835
/* Check whether an IEEE double precision number is a NaN.
   X is a NaN iff its exponent field is all ones (2047) and at least one
   mantissa bit is nonzero (all-zero mantissa would be infinity).  */

int
target_isnan (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  /* Detect the host's layout at run time: store -1.0 and see whether
     its sign bit is visible through the big-endian view.  */
  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
	      && (u.big_endian.mantissa1 != 0
		  || u.big_endian.mantissa2 != 0));
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
	      && (u.little_endian.mantissa1 != 0
		  || u.little_endian.mantissa2 != 0));
    }
}
875
/* Check for a negative IEEE double precision number.
   Returns the raw sign bit, so negative zero and negative NaNs count
   as negative too.  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  /* Detect the host's layout at run time: store -1.0 and see whether
     its sign bit is visible through the big-endian view.  */
  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return u.big_endian.sign;
    }
  else
    {
      u.d = x;
      return u.little_endian.sign;
    }
}
911 #else /* Target not IEEE */
912
/* Let's assume other float formats don't have infinity.
   (This can be overridden by redefining REAL_VALUE_ISINF.)  */

int
target_isinf (x)
     REAL_VALUE_TYPE x ATTRIBUTE_UNUSED;
{
  /* Non-IEEE fallback: unconditionally report "not infinite".  */
  return 0;
}
922
/* Let's assume other float formats don't have NaNs.
   (This can be overridden by redefining REAL_VALUE_ISNAN.)  */

int
target_isnan (x)
     REAL_VALUE_TYPE x ATTRIBUTE_UNUSED;
{
  /* Non-IEEE fallback: unconditionally report "not a NaN".  */
  return 0;
}
932
/* Let's assume other float formats don't have minus zero.
   (This can be overridden by redefining REAL_VALUE_NEGATIVE.)  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  /* With no negative zero to worry about, an ordinary comparison
     suffices.  */
  return x < 0;
}
942 #endif /* Target not IEEE */
943
/* Try to change R into its exact multiplicative inverse in machine mode
   MODE.  Return nonzero function value if successful.

   Succeeds only when R is a power of two whose reciprocal is exactly
   representable in MODE; on success *R is overwritten with 1/R.  Uses
   the host's floating point arithmetic under a trap handler, so any FP
   trap during the computation makes the function fail cleanly.  */

int
exact_real_inverse (mode, r)
     enum machine_mode mode;
     REAL_VALUE_TYPE *r;
{
  jmp_buf float_error;
  union
    {
      double d;
      unsigned short i[4];
    }x, t, y;
#ifdef CHECK_FLOAT_VALUE
  int i;
#endif

  /* Usually disable if bounds checks are not reliable.  */
  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)
    return 0;

  /* Set array index to the less significant bits in the unions, depending
     on the endian-ness of the host doubles.
     Disable if insufficient information on the data structure.  */
#if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
  return 0;
#else
#if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
#define K 2
#else
#if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
#define K 2
#else
#define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
#endif
#endif
#endif

  /* Entered with a nonzero value when a trapped FP error longjmps back
     here; the FAIL label inside this block is also reached by goto from
     the checks below.  Both paths clear the handler and give up.  */
  if (setjmp (float_error))
    {
      /* Don't do the optimization if there was an arithmetic error.  */
fail:
      set_float_handler (NULL_PTR);
      return 0;
    }
  set_float_handler (float_error);

  /* Domain check the argument.  */
  x.d = *r;
  if (x.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))
    goto fail;
#endif

  /* Compute the reciprocal and check for numerical exactness.
     It is unnecessary to check all the significand bits to determine
     whether X is a power of 2.  If X is not, then it is impossible for
     the bottom half significand of both X and 1/X to be all zero bits.
     Hence we ignore the data structure of the top half and examine only
     the low order bits of the two significands.  */
  t.d = 1.0 / x.d;
  if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)
    goto fail;

  /* Truncate to the required mode and range-check the result.  */
  y.d = REAL_VALUE_TRUNCATE (mode, t.d);
#ifdef CHECK_FLOAT_VALUE
  i = 0;
  if (CHECK_FLOAT_VALUE (mode, y.d, i))
    goto fail;
#endif

  /* Fail if truncation changed the value.  */
  if (y.d != t.d || y.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))
    goto fail;
#endif

  /* Output the reciprocal and return success flag.  */
  set_float_handler (NULL_PTR);
  *r = y.d;
  return 1;
}
1034
/* Convert C9X hexadecimal floating point string constant S.  Return
   real value type in mode MODE.  This function uses the host computer's
   floating point arithmetic when there is no REAL_ARITHMETIC.

   The significand is accumulated into the 64-bit pair HIGH:LOW, four
   bits per hex digit; bits that fall off the bottom are OR-ed into
   LOST so rounding can be done correctly.  */

REAL_VALUE_TYPE
real_hex_to_f (s, mode)
   char *s;
   enum machine_mode mode;
{
   REAL_VALUE_TYPE ip;
   char *p = s;
   unsigned HOST_WIDE_INT low, high;
   int shcount, nrmcount, k;
   int sign, expsign, isfloat;
   int lost = 0;/* Nonzero low order bits shifted out and discarded.  */
   int frexpon = 0;  /* Bits after the decimal point.  */
   int expon = 0;  /* Value of exponent.  */
   int decpt = 0;  /* How many decimal points.  */
   int gotp = 0;  /* How many P's.  */
   char c;

   isfloat = 0;
   expsign = 1;
   ip = 0.0;

   /* Skip leading whitespace.  */
   while (*p == ' ' || *p == '\t')
     ++p;

   /* Sign, if any, comes first.  */
   sign = 1;
   if (*p == '-')
     {
       sign = -1;
       ++p;
     }

   /* The string is supposed to start with 0x or 0X .  */
   if (*p == '0')
     {
       ++p;
       if (*p == 'x' || *p == 'X')
	 ++p;
       else
	 abort ();
     }
   else
     abort ();

   /* Leading zeros contribute nothing to the significand.  */
   while (*p == '0')
     ++p;

   /* Scan the significand, decimal point, exponent and suffix.  */
   high = 0;
   low = 0;
   shcount = 0;
   while ((c = *p) != '\0')
     {
       if ((c >= '0' && c <= '9') || (c >= 'A' && c <= 'F')
	   || (c >= 'a' && c <= 'f'))
	 {
	   /* Convert the hex digit to its value 0..15.  */
	   k = c & 0x7f;
	   if (k >= 'a')
	     k = k - 'a' + 10;
	   else if (k >= 'A')
	     k = k - 'A' + 10;
	   else
	     k = k - '0';

	   /* Shift the digit in while HIGH still has headroom;
	      afterwards only track lost bits and the exponent.  */
	   if ((high & 0xf0000000) == 0)
	     {
	       high = (high << 4) + ((low >> 28) & 15);
	       low = (low << 4) + k;
	       shcount += 4;
	       if (decpt)
		 frexpon += 4;
	     }
	   else
	     {
	       /* Record nonzero lost bits.  */
	       lost |= k;
	       if (! decpt)
		 frexpon -= 4;
	     }
	   ++p;
	 }
       else if ( c == '.')
	 {
	   ++decpt;
	   ++p;
	 }

       else if (c == 'p' || c == 'P')
	 {
	   ++gotp;
	   ++p;
	   /* Sign of exponent.  */
	   if (*p == '-')
	     {
	       expsign = -1;
	       ++p;
	     }

	   /* Value of exponent.
	      The exponent field is a decimal integer.  */
	   while (ISDIGIT(*p))
	     {
	       k = (*p++ & 0x7f) - '0';
	       expon = 10 * expon + k;
	     }

	   expon *= expsign;
	   /* F suffix is ambiguous in the significand part
	      so it must appear after the decimal exponent field.  */
	   if (*p == 'f' || *p == 'F')
	     {
	       isfloat = 1;
	       ++p;
	       break;
	     }
	 }

       else if (c == 'l' || c == 'L')
	 {
	   ++p;
	   break;
	 }
       else
	 break;
     }

   /* Abort if last character read was not legitimate.  */
   c = *p;
   if ((c != '\0' && c != ' ' && c != '\n' && c != '\r') || (decpt > 1))
     abort ();

   /* There must be either one decimal point or one p.  */
   if (decpt == 0 && gotp == 0)
     abort ();

   shcount -= 4;
   if (high == 0 && low == 0)
     return dconst0;

   /* Normalize.  Shift the significand up until bit 62 or 63 of
      HIGH:LOW is set, tracking the shift count in NRMCOUNT so it can
      be undone via the exponent below.  */
   nrmcount = 0;
   if (high == 0)
     {
       high = low;
       low = 0;
       nrmcount += 32;
     }

   /* Leave a high guard bit for carry-out.  */
   if ((high & 0x80000000) != 0)
     {
       lost |= low & 1;
       low = (low >> 1) | (high << 31);
       high = high >> 1;
       nrmcount -= 1;
     }

   if ((high & 0xffff8000) == 0)
     {
       high = (high << 16) + ((low >> 16) & 0xffff);
       low = low << 16;
       nrmcount += 16;
     }

   while ((high & 0xc0000000) == 0)
     {
       high = (high << 1) + ((low >> 31) & 1);
       low = low << 1;
       nrmcount += 1;
     }

   /* Round to the precision of the destination format
      (round-to-nearest, ties away from even unless bits were lost).  */
   if (isfloat || GET_MODE_SIZE(mode) == UNITS_PER_WORD)
     {
       /* Keep 24 bits precision, bits 0x7fffff80.
	  Rounding bit is 0x40.  */
       lost = lost | low | (high & 0x3f);
       low = 0;
       if (high & 0x40)
	 {
	   if ((high & 0x80) || lost)
	     high += 0x40;
	 }
       high &= 0xffffff80;
     }
   else
     {
       /* We need real.c to do long double formats, so here default
	  to double precision.  */
#if HOST_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
       /* IEEE double.
	  Keep 53 bits precision, bits 0x7fffffff fffffc00.
	  Rounding bit is low word 0x200.  */
       lost = lost | (low & 0x1ff);
       if (low & 0x200)
	 {
	   if ((low & 0x400) || lost)
	     {
	       low = (low + 0x200) & 0xfffffc00;
	       if (low == 0)
		 high += 1;
	     }
	 }
       low &= 0xfffffc00;
#else
       /* Assume it's a VAX with 56-bit significand,
	  bits 0x7fffffff ffffff80.  */
       lost = lost | (low & 0x7f);
       if (low & 0x40)
	 {
	   if ((low & 0x80) || lost)
	     {
	       low = (low + 0x40) & 0xffffff80;
	       if (low == 0)
		 high += 1;
	     }
	 }
       low &= 0xffffff80;
#endif
     }

   /* Reassemble HIGH:LOW into a host real number.  */
   ip = (double) high;
   ip = REAL_VALUE_LDEXP (ip, 32) + (double) low;
   /* Apply shifts and exponent value as power of 2.  */
   ip = REAL_VALUE_LDEXP (ip, expon - (nrmcount + frexpon));

   if (sign < 0)
     ip = -ip;
   return ip;
}
1267
1268 #endif /* no REAL_ARITHMETIC */
1269 \f
1270 /* Given T, an expression, return the negation of T. Allow for T to be
1271 null, in which case return null. */
1272
1273 static tree
1274 negate_expr (t)
1275 tree t;
1276 {
1277 tree type;
1278 tree tem;
1279
1280 if (t == 0)
1281 return 0;
1282
1283 type = TREE_TYPE (t);
1284 STRIP_SIGN_NOPS (t);
1285
1286 switch (TREE_CODE (t))
1287 {
1288 case INTEGER_CST:
1289 case REAL_CST:
1290 if (! TREE_UNSIGNED (type)
1291 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
1292 && ! TREE_OVERFLOW (tem))
1293 return tem;
1294 break;
1295
1296 case NEGATE_EXPR:
1297 return convert (type, TREE_OPERAND (t, 0));
1298
1299 case MINUS_EXPR:
1300 /* - (A - B) -> B - A */
1301 if (! FLOAT_TYPE_P (type) || flag_fast_math)
1302 return convert (type,
1303 fold (build (MINUS_EXPR, TREE_TYPE (t),
1304 TREE_OPERAND (t, 1),
1305 TREE_OPERAND (t, 0))));
1306 break;
1307
1308 default:
1309 break;
1310 }
1311
1312 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1313 }
1314 \f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in &LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  If NEGATE_P is true, we
   are negating all of IN.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (in, code, conp, litp, negate_p)
     tree in;
     enum tree_code code;
     tree *conp, *litp;
     int negate_p;
{
  tree var = 0;

  *conp = 0;
  *litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness. */
  STRIP_SIGN_NOPS (in);

  /* If IN itself is a literal or constant there is nothing to split.  */
  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CONSTANT (in))
    *conp = in;

  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* NEG1_P: IN is a subtraction, so its second operand (and anything
	 classified from it) must be negated.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.
	 An operand is set to 0 once it has been consumed.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any. */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p) *litp = negate_expr (*litp);
      if (neg_conp_p) *conp = negate_expr (*conp);
      if (neg_var_p) var = negate_expr (var);
    }
  else
    var = in;

  /* Caller asked for -(IN): negate each of the three parts.  */
  if (negate_p)
    {
      var = negate_expr (var);
      *conp = negate_expr (*conp);
      *litp = negate_expr (*litp);
    }

  return var;
}
1403
1404 /* Re-associate trees split by the above function. T1 and T2 are either
1405 expressions to associate or null. Return the new expression, if any. If
1406 we build an operation, do it in TYPE and with CODE, except if CODE is a
1407 MINUS_EXPR, in which case we use PLUS_EXPR since split_tree will already
1408 have taken care of the negations. */
1409
1410 static tree
1411 associate_trees (t1, t2, code, type)
1412 tree t1, t2;
1413 enum tree_code code;
1414 tree type;
1415 {
1416 if (t1 == 0)
1417 return t2;
1418 else if (t2 == 0)
1419 return t1;
1420
1421 if (code == MINUS_EXPR)
1422 code = PLUS_EXPR;
1423
1424 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1425 try to fold this since we will have infinite recursion. But do
1426 deal with any NEGATE_EXPRs. */
1427 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1428 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1429 {
1430 if (TREE_CODE (t1) == NEGATE_EXPR)
1431 return build (MINUS_EXPR, type, convert (type, t2),
1432 convert (type, TREE_OPERAND (t1, 0)));
1433 else if (TREE_CODE (t2) == NEGATE_EXPR)
1434 return build (MINUS_EXPR, type, convert (type, t1),
1435 convert (type, TREE_OPERAND (t2, 0)));
1436 else
1437 return build (code, type, convert (type, t1), convert (type, t2));
1438 }
1439
1440 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1441 }
1442 \f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  The arithmetic is carried out on the
   double-word pairs (INT1H:INT1L, INT2H:INT2L).

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.
   If FORSIZE is nonzero, compute overflow for unsigned types.  */

static tree
int_const_binop (code, arg1, arg2, notrunc, forsize)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc, forsize;
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  register tree t;
  int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      /* A right shift is a left shift by a negated count.  */
      int2l = - int2l;
      /* ... fall through ... */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (TREE_TYPE (arg1)),
		     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      /* A right rotate is a left rotate by a negated count.  */
      int2l = - int2l;
      /* ... fall through ... */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (TREE_TYPE (arg1)),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      /* Subtraction is addition of the negated second operand.  */
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ... */

    case ROUND_DIV_EXPR:
      /* X / 1 is X.  */
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      /* X / X is 1 for nonzero X.  */
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ... */

    case ROUND_MOD_EXPR:
      /* Here the quotient is the garbage and the remainder is kept.  */
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      /* LOW is temporarily the "ARG1 < ARG2" flag here.  */
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* For small positive size results, reuse the cache in
     size_int_type_wide instead of building a fresh node.  */
  if (forsize && hi == 0 && low < 10000)
    return size_int_type_wide (low, TREE_TYPE (arg1));
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc ? (!uns || forsize) && overflow
	: force_fit_type (t, (!uns || forsize) && overflow) && ! no_overflow)
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (forsize
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
	  || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
				| TREE_CONSTANT_OVERFLOW (arg1)
				| TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
1627
/* Define input and output arguments for const_binop_1, which runs
   under do_float_handler and therefore can take only one pointer.  */
struct cb_args
{
  enum tree_code code;		/* Input: tree code for operation.  */
  tree type;			/* Input: tree type for operation.  */
  REAL_VALUE_TYPE d1, d2;	/* Input: floating point operands.  */
  tree t;			/* Output: constant for result.  */
};
1636
/* Do the real arithmetic for const_binop while protected by a
   float overflow handler.  DATA points to a struct cb_args holding
   the operands; the resulting REAL_CST is stored in DATA->t.  */

static void
const_binop_1 (data)
     PTR data;
{
  struct cb_args *args = (struct cb_args *) data;
  REAL_VALUE_TYPE value;

#ifdef REAL_ARITHMETIC
  /* Target-format software arithmetic.  */
  REAL_ARITHMETIC (value, args->code, args->d1, args->d2);
#else
  /* Host floating point arithmetic.  */
  switch (args->code)
    {
    case PLUS_EXPR:
      value = args->d1 + args->d2;
      break;

    case MINUS_EXPR:
      value = args->d1 - args->d2;
      break;

    case MULT_EXPR:
      value = args->d1 * args->d2;
      break;

    case RDIV_EXPR:
#ifndef REAL_INFINITY
      if (args->d2 == 0)
	abort ();
#endif

      value = args->d1 / args->d2;
      break;

    case MIN_EXPR:
      value = MIN (args->d1, args->d2);
      break;

    case MAX_EXPR:
      value = MAX (args->d1, args->d2);
      break;

    default:
      abort ();
    }
#endif /* no REAL_ARITHMETIC */

  /* Truncate to the target mode's precision before building the node.  */
  args->t
    = build_real (args->type,
		  real_value_truncate (TYPE_MODE (args->type), value));
}
1690
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Handles
   INTEGER_CST, REAL_CST and COMPLEX_CST operands; returns 0 for anything
   else.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type. */

static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc;
{
  STRIP_NOPS (arg1); STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc, 0);

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      int overflow = 0;
      tree t;
      struct cb_args args;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      /* Setup input for const_binop_1() */
      args.type = TREE_TYPE (arg1);
      args.d1 = d1;
      args.d2 = d2;
      args.code = code;

      if (do_float_handler (const_binop_1, (PTR) &args))
	/* Receive output from const_binop_1.  */
	t = args.t;
      else
	{
	  /* We got an exception from const_binop_1.  */
	  t = copy_node (arg1);
	  overflow = 1;
	}

      TREE_OVERFLOW (t)
	= (force_fit_type (t, overflow)
	   | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      register tree type = TREE_TYPE (arg1);
      register tree r1 = TREE_REALPART (arg1);
      register tree i1 = TREE_IMAGPART (arg1);
      register tree r2 = TREE_REALPART (arg2);
      register tree i2 = TREE_IMAGPART (arg2);
      register tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  /* (r1+i1 i) + (r2+i2 i) = (r1+r2) + (i1+i2) i.  */
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  /* (r1+i1 i) - (r2+i2 i) = (r1-r2) + (i1-i2) i.  */
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  /* (r1+i1 i) * (r2+i2 i) = (r1r2-i1i2) + (r1i2+i1r2) i.  */
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    /* Division by the squared magnitude of the divisor:
	       (r1+i1 i)/(r2+i2 i)
	       = ((r1r2+i1i2) + (i1r2-r1i2) i) / (r2^2+i2^2).  */
	    register tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  abort ();
	}
      return t;
    }
  return 0;
}
1831 \f
1832 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1833 bits are given by NUMBER and of the sizetype represented by KIND. */
1834
1835 tree
1836 size_int_wide (number, kind)
1837 HOST_WIDE_INT number;
1838 enum size_type_kind kind;
1839 {
1840 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1841 }
1842
/* Likewise, but the desired type is specified explicitly.  Small
   non-negative values are cached in SIZE_TABLE (one chain of nodes per
   value, distinguished by type) so repeated requests share nodes.  */

tree
size_int_type_wide (number, type)
     HOST_WIDE_INT number;
     tree type;
{
  /* Type-size nodes already made for small sizes.  */
  static tree size_table[2048 + 1];
  static int init_p = 0;
  tree t;

  /* Register the cache as a GC root the first time through, so the
     collector does not free cached nodes.  */
  if (ggc_p && ! init_p)
    {
      ggc_add_tree_root ((tree *) size_table,
			 sizeof size_table / sizeof (tree));
      init_p = 1;
    }

  /* If this is a positive number that fits in the table we use to hold
     cached entries, see if it is already in the table and put it there
     if not.  */
  if (number >= 0 && number < (int) (sizeof size_table / sizeof size_table[0]))
    {
      /* Search the chain for a node of the requested type.  */
      if (size_table[number] != 0)
	for (t = size_table[number]; t != 0; t = TREE_CHAIN (t))
	  if (TREE_TYPE (t) == type)
	    return t;

      if (! ggc_p)
	{
	  /* Make this a permanent node.  */
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	}

      /* Build a new node and link it onto the front of the chain.  */
      t = build_int_2 (number, 0);
      TREE_TYPE (t) = type;
      TREE_CHAIN (t) = size_table[number];
      size_table[number] = t;

      if (! ggc_p)
	pop_obstacks ();

      return t;
    }

  /* Uncached case: build a fresh, possibly sign-extended node.  */
  t = build_int_2 (number, number < 0 ? -1 : 0);
  TREE_TYPE (t) = type;
  TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
  return t;
}
1895
1896 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1897 is a tree code. The type of the result is taken from the operands.
1898 Both must be the same type integer type and it must be a size type.
1899 If the operands are constant, so is the result. */
1900
1901 tree
1902 size_binop (code, arg0, arg1)
1903 enum tree_code code;
1904 tree arg0, arg1;
1905 {
1906 tree type = TREE_TYPE (arg0);
1907
1908 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1909 || type != TREE_TYPE (arg1))
1910 abort ();
1911
1912 /* Handle the special case of two integer constants faster. */
1913 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1914 {
1915 /* And some specific cases even faster than that. */
1916 if (code == PLUS_EXPR && integer_zerop (arg0))
1917 return arg1;
1918 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1919 && integer_zerop (arg1))
1920 return arg0;
1921 else if (code == MULT_EXPR && integer_onep (arg0))
1922 return arg1;
1923
1924 /* Handle general case of two integer constants. */
1925 return int_const_binop (code, arg0, arg1, 0, 1);
1926 }
1927
1928 if (arg0 == error_mark_node || arg1 == error_mark_node)
1929 return error_mark_node;
1930
1931 return fold (build (code, type, arg0, arg1));
1932 }
1933
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (arg0, arg1)
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  /* Both operands must share one integer size type.  */
  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  /* Pick the signed counterpart of the operands' size type.  */
  ctype = (type == bitsizetype || type == ubitsizetype
	   ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
		       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
		       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
1975 \f
/* This structure is used to communicate arguments to fold_convert_1,
   which runs under do_float_handler and takes a single pointer.  */
struct fc_args
{
  tree arg1;			/* Input: value to convert.  */
  tree type;			/* Input: type to convert value to.  */
  tree t;			/* Output: result of conversion.  */
};
1983
1984 /* Function to convert floating-point constants, protected by floating
1985 point exception handler. */
1986
1987 static void
1988 fold_convert_1 (data)
1989 PTR data;
1990 {
1991 struct fc_args * args = (struct fc_args *) data;
1992
1993 args->t = build_real (args->type,
1994 real_value_truncate (TYPE_MODE (args->type),
1995 TREE_REAL_CST (args->arg1)));
1996 }
1997
1998 /* Given T, a tree representing type conversion of ARG1, a constant,
1999 return a constant tree representing the result of conversion. */
2000
2001 static tree
2002 fold_convert (t, arg1)
2003 register tree t;
2004 register tree arg1;
2005 {
2006 register tree type = TREE_TYPE (t);
2007 int overflow = 0;
2008
2009 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2010 {
2011 if (TREE_CODE (arg1) == INTEGER_CST)
2012 {
2013 /* If we would build a constant wider than GCC supports,
2014 leave the conversion unfolded. */
2015 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
2016 return t;
2017
2018 /* If we are trying to make a sizetype for a small integer, use
2019 size_int to pick up cached types to reduce duplicate nodes. */
2020 if (TREE_CODE (type) == INTEGER_CST && TYPE_IS_SIZETYPE (type)
2021 && compare_tree_int (arg1, 10000) < 0)
2022 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
2023
2024 /* Given an integer constant, make new constant with new type,
2025 appropriately sign-extended or truncated. */
2026 t = build_int_2 (TREE_INT_CST_LOW (arg1),
2027 TREE_INT_CST_HIGH (arg1));
2028 TREE_TYPE (t) = type;
2029 /* Indicate an overflow if (1) ARG1 already overflowed,
2030 or (2) force_fit_type indicates an overflow.
2031 Tell force_fit_type that an overflow has already occurred
2032 if ARG1 is a too-large unsigned value and T is signed.
2033 But don't indicate an overflow if converting a pointer. */
2034 TREE_OVERFLOW (t)
2035 = ((force_fit_type (t,
2036 (TREE_INT_CST_HIGH (arg1) < 0
2037 && (TREE_UNSIGNED (type)
2038 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
2039 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
2040 || TREE_OVERFLOW (arg1));
2041 TREE_CONSTANT_OVERFLOW (t)
2042 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2043 }
2044 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2045 else if (TREE_CODE (arg1) == REAL_CST)
2046 {
2047 /* Don't initialize these, use assignments.
2048 Initialized local aggregates don't work on old compilers. */
2049 REAL_VALUE_TYPE x;
2050 REAL_VALUE_TYPE l;
2051 REAL_VALUE_TYPE u;
2052 tree type1 = TREE_TYPE (arg1);
2053 int no_upper_bound;
2054
2055 x = TREE_REAL_CST (arg1);
2056 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
2057
2058 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
2059 if (!no_upper_bound)
2060 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
2061
2062 /* See if X will be in range after truncation towards 0.
2063 To compensate for truncation, move the bounds away from 0,
2064 but reject if X exactly equals the adjusted bounds. */
2065 #ifdef REAL_ARITHMETIC
2066 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
2067 if (!no_upper_bound)
2068 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
2069 #else
2070 l--;
2071 if (!no_upper_bound)
2072 u++;
2073 #endif
2074 /* If X is a NaN, use zero instead and show we have an overflow.
2075 Otherwise, range check. */
2076 if (REAL_VALUE_ISNAN (x))
2077 overflow = 1, x = dconst0;
2078 else if (! (REAL_VALUES_LESS (l, x)
2079 && !no_upper_bound
2080 && REAL_VALUES_LESS (x, u)))
2081 overflow = 1;
2082
2083 #ifndef REAL_ARITHMETIC
2084 {
2085 HOST_WIDE_INT low, high;
2086 HOST_WIDE_INT half_word
2087 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
2088
2089 if (x < 0)
2090 x = -x;
2091
2092 high = (HOST_WIDE_INT) (x / half_word / half_word);
2093 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
2094 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
2095 {
2096 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
2097 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
2098 }
2099 else
2100 low = (HOST_WIDE_INT) x;
2101 if (TREE_REAL_CST (arg1) < 0)
2102 neg_double (low, high, &low, &high);
2103 t = build_int_2 (low, high);
2104 }
2105 #else
2106 {
2107 HOST_WIDE_INT low, high;
2108 REAL_VALUE_TO_INT (&low, &high, x);
2109 t = build_int_2 (low, high);
2110 }
2111 #endif
2112 TREE_TYPE (t) = type;
2113 TREE_OVERFLOW (t)
2114 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
2115 TREE_CONSTANT_OVERFLOW (t)
2116 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2117 }
2118 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2119 TREE_TYPE (t) = type;
2120 }
2121 else if (TREE_CODE (type) == REAL_TYPE)
2122 {
2123 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2124 if (TREE_CODE (arg1) == INTEGER_CST)
2125 return build_real_from_int_cst (type, arg1);
2126 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2127 if (TREE_CODE (arg1) == REAL_CST)
2128 {
2129 struct fc_args args;
2130
2131 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
2132 {
2133 t = arg1;
2134 TREE_TYPE (arg1) = type;
2135 return t;
2136 }
2137
2138 /* Setup input for fold_convert_1() */
2139 args.arg1 = arg1;
2140 args.type = type;
2141
2142 if (do_float_handler (fold_convert_1, (PTR) &args))
2143 {
2144 /* Receive output from fold_convert_1() */
2145 t = args.t;
2146 }
2147 else
2148 {
2149 /* We got an exception from fold_convert_1() */
2150 overflow = 1;
2151 t = copy_node (arg1);
2152 }
2153
2154 TREE_OVERFLOW (t)
2155 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
2156 TREE_CONSTANT_OVERFLOW (t)
2157 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2158 return t;
2159 }
2160 }
2161 TREE_CONSTANT (t) = 1;
2162 return t;
2163 }
2164 \f
2165 /* Return an expr equal to X but certainly not valid as an lvalue. */
2166
2167 tree
2168 non_lvalue (x)
2169 tree x;
2170 {
2171 tree result;
2172
2173 /* These things are certainly not lvalues. */
2174 if (TREE_CODE (x) == NON_LVALUE_EXPR
2175 || TREE_CODE (x) == INTEGER_CST
2176 || TREE_CODE (x) == REAL_CST
2177 || TREE_CODE (x) == STRING_CST
2178 || TREE_CODE (x) == ADDR_EXPR)
2179 return x;
2180
2181 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2182 TREE_CONSTANT (result) = TREE_CONSTANT (x);
2183 return result;
2184 }
2185
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  Consulted by pedantic_non_lvalue
   below.  */

int pedantic_lvalues;
2190
2191 /* When pedantic, return an expr equal to X but certainly not valid as a
2192 pedantic lvalue. Otherwise, return X. */
2193
2194 tree
2195 pedantic_non_lvalue (x)
2196 tree x;
2197 {
2198 if (pedantic_lvalues)
2199 return non_lvalue (x);
2200 else
2201 return x;
2202 }
2203 \f
2204 /* Given a tree comparison code, return the code that is the logical inverse
2205 of the given code. It is not safe to do this for floating-point
2206 comparisons, except for NE_EXPR and EQ_EXPR. */
2207
2208 static enum tree_code
2209 invert_tree_comparison (code)
2210 enum tree_code code;
2211 {
2212 switch (code)
2213 {
2214 case EQ_EXPR:
2215 return NE_EXPR;
2216 case NE_EXPR:
2217 return EQ_EXPR;
2218 case GT_EXPR:
2219 return LE_EXPR;
2220 case GE_EXPR:
2221 return LT_EXPR;
2222 case LT_EXPR:
2223 return GE_EXPR;
2224 case LE_EXPR:
2225 return GT_EXPR;
2226 default:
2227 abort ();
2228 }
2229 }
2230
2231 /* Similar, but return the comparison that results if the operands are
2232 swapped. This is safe for floating-point. */
2233
2234 static enum tree_code
2235 swap_tree_comparison (code)
2236 enum tree_code code;
2237 {
2238 switch (code)
2239 {
2240 case EQ_EXPR:
2241 case NE_EXPR:
2242 return code;
2243 case GT_EXPR:
2244 return LT_EXPR;
2245 case GE_EXPR:
2246 return LE_EXPR;
2247 case LT_EXPR:
2248 return GT_EXPR;
2249 case LE_EXPR:
2250 return GE_EXPR;
2251 default:
2252 abort ();
2253 }
2254 }
2255
2256 /* Return nonzero if CODE is a tree code that represents a truth value. */
2257
2258 static int
2259 truth_value_p (code)
2260 enum tree_code code;
2261 {
2262 return (TREE_CODE_CLASS (code) == '<'
2263 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2264 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2265 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2266 }
2267 \f
2268 /* Return nonzero if two operands are necessarily equal.
2269 If ONLY_CONST is non-zero, only return non-zero for constants.
2270 This function tests whether the operands are indistinguishable;
2271 it does not test whether they are equal using C's == operation.
2272 The distinction is important for IEEE floating point, because
2273 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2274 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
2275
int
operand_equal_p (arg0, arg1, only_const)
     tree arg0, arg1;
     int only_const;
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* The tree codes must match, neither type may be erroneous, and the
     machine modes must agree.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	/* Constants that overflowed are never considered equal.  */
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	/* Bit-identity, not ==, so that -0.0 and 0.0 stay distinct
	   (see the IEEE note in the function's header comment).  */
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case COMPLEX_CST:
	/* Compare real and imaginary parts component-wise.  */
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 only_const)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    only_const));

      case STRING_CST:
	/* Same length and same bytes.  */
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
	  && (TREE_UNSIGNED (TREE_TYPE (arg0))
	      != TREE_UNSIGNED (TREE_TYPE (arg1))))
	return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      /* Binary operators and comparisons: first try the operands in
	 the same order.  */
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
	  && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
			      0))
	return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
	       || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
	       || TREE_CODE (arg0) == BIT_IOR_EXPR
	       || TREE_CODE (arg0) == BIT_XOR_EXPR
	       || TREE_CODE (arg0) == BIT_AND_EXPR
	       || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are dereferencing
	 contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case COMPONENT_REF:
	case ARRAY_REF:
	  /* Same base object and same field/index.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0));

	case BIT_FIELD_REF:
	  /* Same base object, same size and same bit position.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), 0));
	default:
	  return 0;
	}

    case 'e':
      /* RTL_EXPRs compare equal when their RTL is; all other
	 expression nodes are conservatively treated as unequal.  */
      if (TREE_CODE (arg0) == RTL_EXPR)
	return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
      return 0;

    default:
      return 0;
    }
}
2419 \f
2420 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2421 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2422
2423 When in doubt, return 0. */
2424
static int
operand_equal_for_comparison_p (arg0, arg1, other)
     tree arg0, arg1;
     tree other;
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  /* Identical operands trivially qualify.  */
  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* Shortening only happens for integral comparisons.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0); STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  /* Only worth trying if both narrowed operands are strictly narrower
     than ARG1's precision and agree on signedness.  */
  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = convert (signed_or_unsigned_type (unsignedp1,
						   TREE_TYPE (primarg1)),
			  primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
	return 1;
    }

  /* When in doubt, report them as not equal.  */
  return 0;
}
2477 \f
2478 /* See if ARG is an expression that is either a comparison or is performing
2479 arithmetic on comparisons. The comparisons must only be comparing
2480 two different values, which will be stored in *CVAL1 and *CVAL2; if
2481 they are non-zero it means that some operands have already been found.
2482 No variables may be used anywhere else in the expression except in the
2483 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2484 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2485
2486 If this is true, return 1. Otherwise, return zero. */
2487
static int
twoval_comparison_p (arg, cval1, cval2, save_p)
     tree arg;
     tree *cval1, *cval2;
     int *save_p;
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = '2';

  /* An un-expanded SAVE_EXPR with a side-effect-free body can be looked
     through, but the caller must be told (via *SAVE_P) to re-wrap the
     operands in save_exprs.  */
  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = '1';
      *save_p = 1;
    }

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case 'c':
      /* A constant contains no comparison operands at all.  */
      return 1;

    case 'e':
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      /* Match operand 0 against the recorded values, recording it in the
	 first free slot.  */
      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      /* Likewise operand 1; at this point *CVAL1 is known non-zero.  */
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
2577 \f
2578 /* ARG is a tree that is known to contain just arithmetic operations and
2579 comparisons. Evaluate the operations in the tree substituting NEW0 for
2580 any occurrence of OLD0 as an operand of a comparison and likewise for
2581 NEW1 and OLD1. */
2582
static tree
eval_subst (arg, old0, new0, old1, new1)
     tree arg;
     tree old0, new0, old1, new1;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      /* Rebuild the unary node around the substituted operand and fold.  */
      return fold (build1 (code, type,
			   eval_subst (TREE_OPERAND (arg, 0),
				       old0, new0, old1, new1)));

    case '2':
      /* Likewise for binary nodes, substituting in both operands.  */
      return fold (build (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1)));

    case 'e':
      switch (code)
	{
	case SAVE_EXPR:
	  /* Substitute inside the saved expression.  */
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  /* Only the value operand matters for evaluation.  */
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold (build (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1)));
	default:
	  break;
	}
      /* fall through - ??? */

    case '<':
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold (build (code, type, arg0, arg1));
      }

    default:
      /* Anything else is returned unchanged.  */
      return arg;
    }
}
2661 \f
2662 /* Return a tree for the case when the result of an expression is RESULT
2663 converted to TYPE and OMITTED was previously an operand of the expression
2664 but is now not needed (e.g., we folded OMITTED * 0).
2665
2666 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2667 the conversion of RESULT to TYPE. */
2668
2669 static tree
2670 omit_one_operand (type, result, omitted)
2671 tree type, result, omitted;
2672 {
2673 tree t = convert (type, result);
2674
2675 if (TREE_SIDE_EFFECTS (omitted))
2676 return build (COMPOUND_EXPR, type, omitted, t);
2677
2678 return non_lvalue (t);
2679 }
2680
2681 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2682
2683 static tree
2684 pedantic_omit_one_operand (type, result, omitted)
2685 tree type, result, omitted;
2686 {
2687 tree t = convert (type, result);
2688
2689 if (TREE_SIDE_EFFECTS (omitted))
2690 return build (COMPOUND_EXPR, type, omitted, t);
2691
2692 return pedantic_non_lvalue (t);
2693 }
2694
2695
2696 \f
2697 /* Return a simplified tree node for the truth-negation of ARG. This
2698 never alters ARG itself. We assume that ARG is an operation that
2699 returns a truth value (0 or 1). */
2700
tree
invert_truthvalue (arg)
     tree arg;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* Propagate errors unchanged.  */
  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	  && !flag_fast_math && code != NE_EXPR && code != EQ_EXPR)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      else
	return build (invert_tree_comparison (code), type,
		      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      /* A constant truth value inverts to the opposite constant.  */
      return convert (type, build_int_2 (integer_zerop (arg), 0));

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a && b) == !a || !b.  */
      return build (TRUTH_OR_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a || b) == !a && !b.  */
      return build (TRUTH_AND_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build (TRUTH_XOR_EXPR, type,
		      invert_truthvalue (TREE_OPERAND (arg, 0)),
		      TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      /* De Morgan also holds for the short-circuit forms.  */
      return build (TRUTH_ORIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      /* Invert both arms; the condition is unchanged.  */
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)),
		    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      /* Only the value operand carries the truth value.  */
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case WITH_RECORD_EXPR:
      return build (WITH_RECORD_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    TREE_OPERAND (arg, 1));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      /* Push the inversion inside the conversion.  */
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* Only (x & 1) can be inverted this way, as !(x & 1) == ((x & 1) == 0).  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  /* Anything left unhandled must already be a boolean-typed tree,
     which a TRUTH_NOT_EXPR can safely wrap.  */
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
2809
2810 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2811 operands are another bit-wise operation with a common input. If so,
2812 distribute the bit operations to save an operation and possibly two if
2813 constants are involved. For example, convert
2814 (A | B) & (A | C) into A | (B & C)
2815 Further simplification will occur if B and C are constants.
2816
2817 If this optimization cannot be done, 0 will be returned. */
2818
2819 static tree
2820 distribute_bit_expr (code, type, arg0, arg1)
2821 enum tree_code code;
2822 tree type;
2823 tree arg0, arg1;
2824 {
2825 tree common;
2826 tree left, right;
2827
2828 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2829 || TREE_CODE (arg0) == code
2830 || (TREE_CODE (arg0) != BIT_AND_EXPR
2831 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2832 return 0;
2833
2834 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2835 {
2836 common = TREE_OPERAND (arg0, 0);
2837 left = TREE_OPERAND (arg0, 1);
2838 right = TREE_OPERAND (arg1, 1);
2839 }
2840 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2841 {
2842 common = TREE_OPERAND (arg0, 0);
2843 left = TREE_OPERAND (arg0, 1);
2844 right = TREE_OPERAND (arg1, 0);
2845 }
2846 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2847 {
2848 common = TREE_OPERAND (arg0, 1);
2849 left = TREE_OPERAND (arg0, 0);
2850 right = TREE_OPERAND (arg1, 1);
2851 }
2852 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2853 {
2854 common = TREE_OPERAND (arg0, 1);
2855 left = TREE_OPERAND (arg0, 0);
2856 right = TREE_OPERAND (arg1, 0);
2857 }
2858 else
2859 return 0;
2860
2861 return fold (build (TREE_CODE (arg0), type, common,
2862 fold (build (code, type, left, right))));
2863 }
2864 \f
2865 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2866 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2867
2868 static tree
2869 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2870 tree inner;
2871 tree type;
2872 int bitsize, bitpos;
2873 int unsignedp;
2874 {
2875 tree result = build (BIT_FIELD_REF, type, inner,
2876 size_int (bitsize), bitsize_int (bitpos));
2877
2878 TREE_UNSIGNED (result) = unsignedp;
2879
2880 return result;
2881 }
2882
2883 /* Optimize a bit-field compare.
2884
2885 There are two cases: First is a compare against a constant and the
2886 second is a comparison of two items where the fields are at the same
2887 bit position relative to the start of a chunk (byte, halfword, word)
2888 large enough to contain it. In these cases we can avoid the shift
2889 implicit in bitfield extractions.
2890
2891 For constants, we emit a compare of the shifted constant with the
2892 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2893 compared. For two fields at the same position, we do the ANDs with the
2894 similar mask and compare the result of the ANDs.
2895
2896 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2897 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2898 are the left and right operands of the comparison, respectively.
2899
2900 If the optimization described above can be done, we return the resulting
2901 tree. Otherwise we return zero. */
2902
static tree
optimize_bit_field_compare (code, compare_type, lhs, rhs)
     enum tree_code code;
     tree compare_type;
     tree lhs, rhs;
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  unsigned int alignment;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, &alignment);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
	sizes, and signedness are the same.  */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				   &runsignedp, &rvolatilep, &alignment);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	 || lunsignedp != runsignedp || offset != 0
	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = type_for_mode (nmode, 0);
  unsigned_type = type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it. If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  /* On big-endian machines the bit numbering within the chunk is
     reversed.  */
  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: all ones,
     then shifted so only LBITSIZE bits survive at position LBITPOS.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (linner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask),
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (rinner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask));

  /* Otherwise, we are handling the constant case. See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning ("comparison is always %d due to width of bitfield",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning ("comparison is always %d due to width of bitfield",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
			   const_binop (LSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitpos), 0),
			   mask, 0));

  return build (code, compare_type,
		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
}
3057 \f
3058 /* Subroutine for fold_truthop: decode a field reference.
3059
3060 If EXP is a comparison reference, we return the innermost reference.
3061
3062 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3063 set to the starting bit number.
3064
3065 If the innermost field can be completely contained in a mode-sized
3066 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3067
3068 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3069 otherwise it is not changed.
3070
3071 *PUNSIGNEDP is set to the signedness of the field.
3072
3073 *PMASK is set to the mask used. This is either contained in a
3074 BIT_AND_EXPR or derived from the width of the field.
3075
3076 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3077
3078 Return 0 if this is not a component reference or is one that we can't
3079 do anything with. */
3080
static tree
decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
			pvolatilep, pmask, pand_mask)
     tree exp;
     HOST_WIDE_INT *pbitsize, *pbitpos;
     enum machine_mode *pmode;
     int *punsignedp, *pvolatilep;
     tree *pmask;
     tree *pand_mask;
{
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;
  unsigned int alignment;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  STRIP_NOPS (exp);

  /* Peel off an outer BIT_AND_EXPR, remembering its constant mask so it
     can be merged into the field mask below.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }


  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, &alignment);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* Compute the mask to access the bitfield.  */
  unsigned_type = type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  /* Build an all-ones constant, then truncate it to *PBITSIZE
     low-order bits by shifting left then right.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
			convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
3141
3142 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
3143 bit positions. */
3144
3145 static int
3146 all_ones_mask_p (mask, size)
3147 tree mask;
3148 int size;
3149 {
3150 tree type = TREE_TYPE (mask);
3151 unsigned int precision = TYPE_PRECISION (type);
3152 tree tmask;
3153
3154 tmask = build_int_2 (~0, ~0);
3155 TREE_TYPE (tmask) = signed_type (type);
3156 force_fit_type (tmask, 0);
3157 return
3158 tree_int_cst_equal (mask,
3159 const_binop (RSHIFT_EXPR,
3160 const_binop (LSHIFT_EXPR, tmask,
3161 size_int (precision - size),
3162 0),
3163 size_int (precision - size), 0));
3164 }
3165
3166 /* Subroutine for fold_truthop: determine if an operand is simple enough
3167 to be evaluated unconditionally. */
3168
3169 static int
3170 simple_operand_p (exp)
3171 tree exp;
3172 {
3173 /* Strip any conversions that don't change the machine mode. */
3174 while ((TREE_CODE (exp) == NOP_EXPR
3175 || TREE_CODE (exp) == CONVERT_EXPR)
3176 && (TYPE_MODE (TREE_TYPE (exp))
3177 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3178 exp = TREE_OPERAND (exp, 0);
3179
3180 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3181 || (DECL_P (exp)
3182 && ! TREE_ADDRESSABLE (exp)
3183 && ! TREE_THIS_VOLATILE (exp)
3184 && ! DECL_NONLOCAL (exp)
3185 /* Don't regard global variables as simple. They may be
3186 allocated in ways unknown to the compiler (shared memory,
3187 #pragma weak, etc). */
3188 && ! TREE_PUBLIC (exp)
3189 && ! DECL_EXTERNAL (exp)
3190 /* Loading a static variable is unduly expensive, but global
3191 registers aren't expensive. */
3192 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3193 }
3194 \f
3195 /* The following functions are subroutines to fold_range_test and allow it to
3196 try to change a logical combination of comparisons into a range test.
3197
3198 For example, both
3199 X == 2 && X == 3 && X == 4 && X == 5
3200 and
3201 X >= 2 && X <= 5
3202 are converted to
3203 (unsigned) (X - 2) <= 3
3204
3205 We describe each set of comparisons as being either inside or outside
3206 a range, using a variable named like IN_P, and then describe the
3207 range with a lower and upper bound. If one of the bounds is omitted,
3208 it represents either the highest or lowest value of the type.
3209
3210 In the comments below, we represent a range by two numbers in brackets
3211 preceded by a "+" to designate being inside that range, or a "-" to
3212 designate being outside that range, so the condition can be inverted by
3213 flipping the prefix. An omitted bound is represented by a "-". For
3214 example, "- [-, 10]" means being outside the range starting at the lowest
3215 possible value and ending at 10, in other words, being greater than 10.
3216 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3217 always false.
3218
3219 We set up things so that the missing bounds are handled in a consistent
3220 manner so neither a missing bound nor "true" and "false" need to be
3221 handled using a special case. */
3222
3223 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3224 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3225 and UPPER1_P are nonzero if the respective argument is an upper bound
3226 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3227 must be specified for a comparison. ARG1 will be converted to ARG0's
3228 type if both are specified. */
3229
3230 static tree
3231 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
3232 enum tree_code code;
3233 tree type;
3234 tree arg0, arg1;
3235 int upper0_p, upper1_p;
3236 {
3237 tree tem;
3238 int result;
3239 int sgn0, sgn1;
3240
3241 /* If neither arg represents infinity, do the normal operation.
3242 Else, if not a comparison, return infinity. Else handle the special
3243 comparison rules. Note that most of the cases below won't occur, but
3244 are handled for consistency. */
3245
3246 if (arg0 != 0 && arg1 != 0)
3247 {
3248 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3249 arg0, convert (TREE_TYPE (arg0), arg1)));
3250 STRIP_NOPS (tem);
3251 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3252 }
3253
3254 if (TREE_CODE_CLASS (code) != '<')
3255 return 0;
3256
3257 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3258 for neither. In real maths, we cannot assume open ended ranges are
3259 the same. But, this is computer arithmetic, where numbers are finite.
3260 We can therefore make the transformation of any unbounded range with
3261 the value Z, Z being greater than any representable number. This permits
3262 us to treat unbounded ranges as equal. */
3263 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3264 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3265 switch (code)
3266 {
3267 case EQ_EXPR:
3268 result = sgn0 == sgn1;
3269 break;
3270 case NE_EXPR:
3271 result = sgn0 != sgn1;
3272 break;
3273 case LT_EXPR:
3274 result = sgn0 < sgn1;
3275 break;
3276 case LE_EXPR:
3277 result = sgn0 <= sgn1;
3278 break;
3279 case GT_EXPR:
3280 result = sgn0 > sgn1;
3281 break;
3282 case GE_EXPR:
3283 result = sgn0 >= sgn1;
3284 break;
3285 default:
3286 abort ();
3287 }
3288
3289 return convert (type, result ? integer_one_node : integer_zero_node);
3290 }
3291 \f
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (exp, pin_p, plow, phigh)
     tree exp;
     int *pin_p;
     tree *plow, *phigh;
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
  tree orig_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);

      /* Pick up the operands of EXP.  ARG0 exists for every expression
	 code; ARG1 only for comparisons, binary operations, and those
	 'e'-class codes that take more than one operand.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == '<'
	      || TREE_CODE_CLASS (code) == '1'
	      || TREE_CODE_CLASS (code) == '2')
	    type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == '2'
	      || TREE_CODE_CLASS (code) == '<'
	      || (TREE_CODE_CLASS (code) == 'e'
		  && tree_code_length[(int) code] > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
	 lose a cast by accident.  */
      if (type != NULL_TREE && orig_type == NULL_TREE)
	orig_type = type;

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  /* Logical negation just flips the in/out sense of the range.  */
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* Translate each comparison into an in/out range against the
	     constant ARG1; "[a, b]" bounds below, with "-" for an
	     unlimited bound.  */
	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      abort ();
	    }

	  exp = arg0;

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  */
	  if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
				  1, convert (type, integer_zero_node),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we
		 have a low bound, reverse the range so
		 it goes from zero to the low bound minus 1.  */
	      if (high == 0 && low)
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = convert (type, integer_zero_node);
		}
	    }
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, type,
			       convert (type, integer_zero_node), 0, high, 1);
	  n_high = range_binop (MINUS_EXPR, type,
				convert (type, integer_zero_node), 0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1; rewrite and let the MINUS_EXPR and
	     NEGATE_EXPR cases handle it on the next iterations.  */
	  exp = build (MINUS_EXPR, type, negate_expr (arg0),
		       convert (type, integer_one_node));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, type, n_low, 0,
				  integer_one_node, 0);
	      in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  /* Refuse to look through a widening cast; the range was
	     established in the narrower original type.  */
	  if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
	    break;

	  if (! INTEGRAL_TYPE_P (type)
	      || (low != 0 && ! int_fits_type_p (low, type))
	      || (high != 0 && ! int_fits_type_p (high, type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = convert (type, n_low);

	  if (n_high != 0)
	    n_high = convert (type, n_high);

	  /* If we're converting from an unsigned to a signed type,
	     we will be doing the comparison as unsigned.  The tests above
	     have already verified that LOW and HIGH are both positive.

	     So we have to make sure that the original unsigned value will
	     be interpreted as positive.  */
	  if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
	    {
	      tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
	      tree high_positive;

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (type);

	      /* Halving the max value gives the largest value that is
		 still non-negative when reinterpreted as signed.  */
	      high_positive = fold (build (RSHIFT_EXPR, type,
					   convert (type, high_positive),
					   convert (type, integer_one_node)));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
3550 \f
3551 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3552 type, TYPE, return an expression to test if EXP is in (or out of, depending
3553 on IN_P) the range. */
3554
3555 static tree
3556 build_range_check (type, exp, in_p, low, high)
3557 tree type;
3558 tree exp;
3559 int in_p;
3560 tree low, high;
3561 {
3562 tree etype = TREE_TYPE (exp);
3563 tree utype, value;
3564
3565 if (! in_p
3566 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3567 return invert_truthvalue (value);
3568
3569 else if (low == 0 && high == 0)
3570 return convert (type, integer_one_node);
3571
3572 else if (low == 0)
3573 return fold (build (LE_EXPR, type, exp, high));
3574
3575 else if (high == 0)
3576 return fold (build (GE_EXPR, type, exp, low));
3577
3578 else if (operand_equal_p (low, high, 0))
3579 return fold (build (EQ_EXPR, type, exp, low));
3580
3581 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3582 return build_range_check (type, exp, 1, 0, high);
3583
3584 else if (integer_zerop (low))
3585 {
3586 utype = unsigned_type (etype);
3587 return build_range_check (type, convert (utype, exp), 1, 0,
3588 convert (utype, high));
3589 }
3590
3591 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3592 && ! TREE_OVERFLOW (value))
3593 return build_range_check (type,
3594 fold (build (MINUS_EXPR, etype, exp, low)),
3595 1, convert (etype, integer_zero_node), value);
3596 else
3597 return 0;
3598 }
3599 \f
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.

   IN0_P, LOW0, HIGH0 describe the first range (whether we are inside or
   outside it, and its bounds, either of which may be 0 for an unlimited
   bound); IN1_P, LOW1, HIGH1 likewise describe the second.  The merged
   range is stored through PIN_P, PLOW and PHIGH.  */

static int
merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
     int *pin_p;
     tree *plow, *phigh;
     int in0_p, in1_p;
     tree low0, high0, low1, high1;
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* Nonzero when the two ranges share the same lower (resp. upper)
     bound, counting two unlimited bounds of the same kind as equal.  */
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the first range to the end of the second.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  in_p = 1, high = high0;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
			     integer_one_node, 0);
	}
      else if (! subset || highequal)
	{
	  in_p = 1, low = low0;
	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
			      integer_one_node, 0);
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  in_p = 1, high = high1;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
			     integer_one_node, 0);
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  /* Adjacency means HIGH0 + 1 equals LOW1.  */
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_binop (PLUS_EXPR, NULL_TREE,
						      high0, 1,
						      integer_one_node, 1),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    return 0;
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
3735 \f
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (exp)
     tree exp;
{
  /* Nonzero for the OR forms; zero for the AND forms.  */
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
	       || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  /* Decompose each operand into a range test; LHS/RHS are the
     expressions being tested, or 0 if the operand folded to a
     constant.  */
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (BRANCH_COST >= 2
	   && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
	       || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
		      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		      TREE_TYPE (exp), TREE_OPERAND (exp, 0),
		      TREE_OPERAND (exp, 1));

      else if (global_bindings_p () == 0
	       && ! contains_placeholder_p (lhs))
	{
	  /* Evaluate the common operand once and test both ranges
	     against the saved value.  */
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
			  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			  TREE_TYPE (exp), lhs, rhs);
	}
    }

  return 0;
}
3805 \f
3806 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3807 bit value. Arrange things so the extra bits will be set to zero if and
3808 only if C is signed-extended to its full width. If MASK is nonzero,
3809 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3810
3811 static tree
3812 unextend (c, p, unsignedp, mask)
3813 tree c;
3814 int p;
3815 int unsignedp;
3816 tree mask;
3817 {
3818 tree type = TREE_TYPE (c);
3819 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3820 tree temp;
3821
3822 if (p == modesize || unsignedp)
3823 return c;
3824
3825 /* We work by getting just the sign bit into the low-order bit, then
3826 into the high-order bit, then sign-extend. We then XOR that value
3827 with C. */
3828 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3829 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3830
3831 /* We must use a signed type in order to get an arithmetic right shift.
3832 However, we must also avoid introducing accidental overflows, so that
3833 a subsequent call to integer_zerop will work. Hence we must
3834 do the type conversion here. At this point, the constant is either
3835 zero or one, and the conversion to a signed type can never overflow.
3836 We could get an overflow if this conversion is done anywhere else. */
3837 if (TREE_UNSIGNED (type))
3838 temp = convert (signed_type (type), temp);
3839
3840 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3841 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3842 if (mask != 0)
3843 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3844 /* If necessary, convert the type back to match the type of C. */
3845 if (TREE_UNSIGNED (type))
3846 temp = convert (type, temp);
3847
3848 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3849 }
3850 \f
3851 /* Find ways of folding logical expressions of LHS and RHS:
3852 Try to merge two comparisons to the same innermost item.
3853 Look for range tests like "ch >= '0' && ch <= '9'".
3854 Look for combinations of simple terms on machines with expensive branches
3855 and evaluate the RHS unconditionally.
3856
3857 For example, if we have p->a == 2 && p->b == 4 and we can make an
3858 object large enough to span both A and B, we can do this with a comparison
3859 against the object ANDed with the a mask.
3860
3861 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3862 operations to do this with one comparison.
3863
   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.
3866
3867 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3868 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3869
3870 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3871 two operands.
3872
3873 We return the simplified tree or 0 if no optimization is possible. */
3874
3875 static tree
3876 fold_truthop (code, truth_type, lhs, rhs)
3877 enum tree_code code;
3878 tree truth_type, lhs, rhs;
3879 {
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
3882 if the comparisons are EQ_EXPR. I.e.,
3883 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3884
3885 WANTED_CODE is this operation code. For single bit fields, we can
3886 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3887 comparison for one-bit fields. */
3888
3889 enum tree_code wanted_code;
3890 enum tree_code lcode, rcode;
3891 tree ll_arg, lr_arg, rl_arg, rr_arg;
3892 tree ll_inner, lr_inner, rl_inner, rr_inner;
3893 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3894 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3895 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3896 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3897 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3898 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3899 enum machine_mode lnmode, rnmode;
3900 tree ll_mask, lr_mask, rl_mask, rr_mask;
3901 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3902 tree l_const, r_const;
3903 tree lntype, rntype, result;
3904 int first_bit, end_bit;
3905 int volatilep;
3906
3907 /* Start by getting the comparison codes. Fail if anything is volatile.
3908 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3909 it were surrounded with a NE_EXPR. */
3910
3911 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3912 return 0;
3913
3914 lcode = TREE_CODE (lhs);
3915 rcode = TREE_CODE (rhs);
3916
3917 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3918 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3919
3920 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3921 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3922
3923 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3924 return 0;
3925
3926 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3927 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3928
3929 ll_arg = TREE_OPERAND (lhs, 0);
3930 lr_arg = TREE_OPERAND (lhs, 1);
3931 rl_arg = TREE_OPERAND (rhs, 0);
3932 rr_arg = TREE_OPERAND (rhs, 1);
3933
3934 /* If the RHS can be evaluated unconditionally and its operands are
3935 simple, it wins to evaluate the RHS unconditionally on machines
3936 with expensive branches. In this case, this isn't a comparison
3937 that can be merged. Avoid doing this if the RHS is a floating-point
3938 comparison since those can trap. */
3939
3940 if (BRANCH_COST >= 2
3941 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3942 && simple_operand_p (rl_arg)
3943 && simple_operand_p (rr_arg))
3944 return build (code, truth_type, lhs, rhs);
3945
3946 /* See if the comparisons can be merged. Then get all the parameters for
3947 each side. */
3948
3949 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3950 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3951 return 0;
3952
3953 volatilep = 0;
3954 ll_inner = decode_field_reference (ll_arg,
3955 &ll_bitsize, &ll_bitpos, &ll_mode,
3956 &ll_unsignedp, &volatilep, &ll_mask,
3957 &ll_and_mask);
3958 lr_inner = decode_field_reference (lr_arg,
3959 &lr_bitsize, &lr_bitpos, &lr_mode,
3960 &lr_unsignedp, &volatilep, &lr_mask,
3961 &lr_and_mask);
3962 rl_inner = decode_field_reference (rl_arg,
3963 &rl_bitsize, &rl_bitpos, &rl_mode,
3964 &rl_unsignedp, &volatilep, &rl_mask,
3965 &rl_and_mask);
3966 rr_inner = decode_field_reference (rr_arg,
3967 &rr_bitsize, &rr_bitpos, &rr_mode,
3968 &rr_unsignedp, &volatilep, &rr_mask,
3969 &rr_and_mask);
3970
3971 /* It must be true that the inner operation on the lhs of each
3972 comparison must be the same if we are to be able to do anything.
3973 Then see if we have constants. If not, the same must be true for
3974 the rhs's. */
3975 if (volatilep || ll_inner == 0 || rl_inner == 0
3976 || ! operand_equal_p (ll_inner, rl_inner, 0))
3977 return 0;
3978
3979 if (TREE_CODE (lr_arg) == INTEGER_CST
3980 && TREE_CODE (rr_arg) == INTEGER_CST)
3981 l_const = lr_arg, r_const = rr_arg;
3982 else if (lr_inner == 0 || rr_inner == 0
3983 || ! operand_equal_p (lr_inner, rr_inner, 0))
3984 return 0;
3985 else
3986 l_const = r_const = 0;
3987
3988 /* If either comparison code is not correct for our logical operation,
3989 fail. However, we can convert a one-bit comparison against zero into
3990 the opposite comparison against that bit being set in the field. */
3991
3992 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3993 if (lcode != wanted_code)
3994 {
3995 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3996 {
3997 /* Make the left operand unsigned, since we are only interested
3998 in the value of one bit. Otherwise we are doing the wrong
3999 thing below. */
4000 ll_unsignedp = 1;
4001 l_const = ll_mask;
4002 }
4003 else
4004 return 0;
4005 }
4006
4007 /* This is analogous to the code for l_const above. */
4008 if (rcode != wanted_code)
4009 {
4010 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4011 {
4012 rl_unsignedp = 1;
4013 r_const = rl_mask;
4014 }
4015 else
4016 return 0;
4017 }
4018
4019 /* See if we can find a mode that contains both fields being compared on
4020 the left. If we can't, fail. Otherwise, update all constants and masks
4021 to be relative to a field of that size. */
4022 first_bit = MIN (ll_bitpos, rl_bitpos);
4023 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4024 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4025 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4026 volatilep);
4027 if (lnmode == VOIDmode)
4028 return 0;
4029
4030 lnbitsize = GET_MODE_BITSIZE (lnmode);
4031 lnbitpos = first_bit & ~ (lnbitsize - 1);
4032 lntype = type_for_size (lnbitsize, 1);
4033 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4034
4035 if (BYTES_BIG_ENDIAN)
4036 {
4037 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4038 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4039 }
4040
4041 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
4042 size_int (xll_bitpos), 0);
4043 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
4044 size_int (xrl_bitpos), 0);
4045
4046 if (l_const)
4047 {
4048 l_const = convert (lntype, l_const);
4049 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4050 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4051 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4052 fold (build1 (BIT_NOT_EXPR,
4053 lntype, ll_mask)),
4054 0)))
4055 {
4056 warning ("comparison is always %d", wanted_code == NE_EXPR);
4057
4058 return convert (truth_type,
4059 wanted_code == NE_EXPR
4060 ? integer_one_node : integer_zero_node);
4061 }
4062 }
4063 if (r_const)
4064 {
4065 r_const = convert (lntype, r_const);
4066 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4067 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4068 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4069 fold (build1 (BIT_NOT_EXPR,
4070 lntype, rl_mask)),
4071 0)))
4072 {
4073 warning ("comparison is always %d", wanted_code == NE_EXPR);
4074
4075 return convert (truth_type,
4076 wanted_code == NE_EXPR
4077 ? integer_one_node : integer_zero_node);
4078 }
4079 }
4080
4081 /* If the right sides are not constant, do the same for it. Also,
4082 disallow this optimization if a size or signedness mismatch occurs
4083 between the left and right sides. */
4084 if (l_const == 0)
4085 {
4086 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4087 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4088 /* Make sure the two fields on the right
4089 correspond to the left without being swapped. */
4090 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4091 return 0;
4092
4093 first_bit = MIN (lr_bitpos, rr_bitpos);
4094 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4095 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4096 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4097 volatilep);
4098 if (rnmode == VOIDmode)
4099 return 0;
4100
4101 rnbitsize = GET_MODE_BITSIZE (rnmode);
4102 rnbitpos = first_bit & ~ (rnbitsize - 1);
4103 rntype = type_for_size (rnbitsize, 1);
4104 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4105
4106 if (BYTES_BIG_ENDIAN)
4107 {
4108 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4109 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4110 }
4111
4112 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
4113 size_int (xlr_bitpos), 0);
4114 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
4115 size_int (xrr_bitpos), 0);
4116
4117 /* Make a mask that corresponds to both fields being compared.
4118 Do this for both items being compared. If the operands are the
4119 same size and the bits being compared are in the same position
4120 then we can do this by masking both and comparing the masked
4121 results. */
4122 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4123 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4124 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4125 {
4126 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4127 ll_unsignedp || rl_unsignedp);
4128 if (! all_ones_mask_p (ll_mask, lnbitsize))
4129 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4130
4131 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4132 lr_unsignedp || rr_unsignedp);
4133 if (! all_ones_mask_p (lr_mask, rnbitsize))
4134 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4135
4136 return build (wanted_code, truth_type, lhs, rhs);
4137 }
4138
4139 /* There is still another way we can do something: If both pairs of
4140 fields being compared are adjacent, we may be able to make a wider
4141 field containing them both.
4142
4143 Note that we still must mask the lhs/rhs expressions. Furthermore,
4144 the mask must be shifted to account for the shift done by
4145 make_bit_field_ref. */
4146 if ((ll_bitsize + ll_bitpos == rl_bitpos
4147 && lr_bitsize + lr_bitpos == rr_bitpos)
4148 || (ll_bitpos == rl_bitpos + rl_bitsize
4149 && lr_bitpos == rr_bitpos + rr_bitsize))
4150 {
4151 tree type;
4152
4153 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4154 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4155 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4156 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4157
4158 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4159 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4160 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4161 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4162
4163 /* Convert to the smaller type before masking out unwanted bits. */
4164 type = lntype;
4165 if (lntype != rntype)
4166 {
4167 if (lnbitsize > rnbitsize)
4168 {
4169 lhs = convert (rntype, lhs);
4170 ll_mask = convert (rntype, ll_mask);
4171 type = rntype;
4172 }
4173 else if (lnbitsize < rnbitsize)
4174 {
4175 rhs = convert (lntype, rhs);
4176 lr_mask = convert (lntype, lr_mask);
4177 type = lntype;
4178 }
4179 }
4180
4181 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4182 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4183
4184 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4185 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4186
4187 return build (wanted_code, truth_type, lhs, rhs);
4188 }
4189
4190 return 0;
4191 }
4192
4193 /* Handle the case of comparisons with constants. If there is something in
4194 common between the masks, those bits of the constants must be the same.
4195 If not, the condition is always false. Test for this to avoid generating
4196 incorrect code below. */
4197 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4198 if (! integer_zerop (result)
4199 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4200 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4201 {
4202 if (wanted_code == NE_EXPR)
4203 {
4204 warning ("`or' of unmatched not-equal tests is always 1");
4205 return convert (truth_type, integer_one_node);
4206 }
4207 else
4208 {
4209 warning ("`and' of mutually exclusive equal-tests is always 0");
4210 return convert (truth_type, integer_zero_node);
4211 }
4212 }
4213
4214 /* Construct the expression we will return. First get the component
4215 reference we will make. Unless the mask is all ones the width of
4216 that field, perform the mask operation. Then compare with the
4217 merged constant. */
4218 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4219 ll_unsignedp || rl_unsignedp);
4220
4221 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4222 if (! all_ones_mask_p (ll_mask, lnbitsize))
4223 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4224
4225 return build (wanted_code, truth_type, result,
4226 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4227 }
4228 \f
4229 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4230 constant. */
4231
4232 static tree
4233 optimize_minmax_comparison (t)
4234 tree t;
4235 {
4236 tree type = TREE_TYPE (t);
4237 tree arg0 = TREE_OPERAND (t, 0);
4238 enum tree_code op_code;
4239 tree comp_const = TREE_OPERAND (t, 1);
4240 tree minmax_const;
4241 int consts_equal, consts_lt;
4242 tree inner;
4243
4244 STRIP_SIGN_NOPS (arg0);
4245
4246 op_code = TREE_CODE (arg0);
4247 minmax_const = TREE_OPERAND (arg0, 1);
4248 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4249 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4250 inner = TREE_OPERAND (arg0, 0);
4251
4252 /* If something does not permit us to optimize, return the original tree. */
4253 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4254 || TREE_CODE (comp_const) != INTEGER_CST
4255 || TREE_CONSTANT_OVERFLOW (comp_const)
4256 || TREE_CODE (minmax_const) != INTEGER_CST
4257 || TREE_CONSTANT_OVERFLOW (minmax_const))
4258 return t;
4259
4260 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4261 and GT_EXPR, doing the rest with recursive calls using logical
4262 simplifications. */
4263 switch (TREE_CODE (t))
4264 {
4265 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4266 return
4267 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4268
4269 case GE_EXPR:
4270 return
4271 fold (build (TRUTH_ORIF_EXPR, type,
4272 optimize_minmax_comparison
4273 (build (EQ_EXPR, type, arg0, comp_const)),
4274 optimize_minmax_comparison
4275 (build (GT_EXPR, type, arg0, comp_const))));
4276
4277 case EQ_EXPR:
4278 if (op_code == MAX_EXPR && consts_equal)
4279 /* MAX (X, 0) == 0 -> X <= 0 */
4280 return fold (build (LE_EXPR, type, inner, comp_const));
4281
4282 else if (op_code == MAX_EXPR && consts_lt)
4283 /* MAX (X, 0) == 5 -> X == 5 */
4284 return fold (build (EQ_EXPR, type, inner, comp_const));
4285
4286 else if (op_code == MAX_EXPR)
4287 /* MAX (X, 0) == -1 -> false */
4288 return omit_one_operand (type, integer_zero_node, inner);
4289
4290 else if (consts_equal)
4291 /* MIN (X, 0) == 0 -> X >= 0 */
4292 return fold (build (GE_EXPR, type, inner, comp_const));
4293
4294 else if (consts_lt)
4295 /* MIN (X, 0) == 5 -> false */
4296 return omit_one_operand (type, integer_zero_node, inner);
4297
4298 else
4299 /* MIN (X, 0) == -1 -> X == -1 */
4300 return fold (build (EQ_EXPR, type, inner, comp_const));
4301
4302 case GT_EXPR:
4303 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4304 /* MAX (X, 0) > 0 -> X > 0
4305 MAX (X, 0) > 5 -> X > 5 */
4306 return fold (build (GT_EXPR, type, inner, comp_const));
4307
4308 else if (op_code == MAX_EXPR)
4309 /* MAX (X, 0) > -1 -> true */
4310 return omit_one_operand (type, integer_one_node, inner);
4311
4312 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4313 /* MIN (X, 0) > 0 -> false
4314 MIN (X, 0) > 5 -> false */
4315 return omit_one_operand (type, integer_zero_node, inner);
4316
4317 else
4318 /* MIN (X, 0) > -1 -> X > -1 */
4319 return fold (build (GT_EXPR, type, inner, comp_const));
4320
4321 default:
4322 return t;
4323 }
4324 }
4325 \f
4326 /* T is an integer expression that is being multiplied, divided, or taken a
4327 modulus (CODE says which and what kind of divide or modulus) by a
4328 constant C. See if we can eliminate that operation by folding it with
4329 other operations already in T. WIDE_TYPE, if non-null, is a type that
4330 should be used for the computation if wider than our type.
4331
4332 For example, if we are dividing (X * 8) + (Y + 16) by 4, we can return
4333 (X * 2) + (Y + 4). We must, however, be assured that either the original
4334 expression would not overflow or that overflow is undefined for the type
4335 in the language in question.
4336
4337 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4338 the machine has a multiply-accumulate insn or that this is part of an
4339 addressing calculation.
4340
4341 If we return a non-null expression, it is an equivalent form of the
4342 original computation, but need not be in the original type. */
4343
static tree
extract_muldiv (t, c, code, wide_type)
     tree t;
     tree c;
     enum tree_code code;
     tree wide_type;
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  /* Do the arithmetic in WIDE_TYPE when it is strictly wider than T's
     own type; otherwise stay in T's type.  */
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == '1')
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == '2')
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
      break;

    case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
				     code == MULT_EXPR ? ctype : NULL_TREE)))
	return t1;
      break;

    case NEGATE_EXPR: case ABS_EXPR:
      /* Negation and absolute value commute with multiplication and
	 division, so push the operation inside and rebuild the unary
	 node in CTYPE.  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build1 (tcode, ctype, convert (ctype, t1)));
      break;

    case MIN_EXPR: case MAX_EXPR:
      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  /* Multiplying or dividing by a negative constant reverses the
	     ordering, so MIN becomes MAX and vice versa.  */
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold (build (tcode, ctype, convert (ctype, t1),
			      convert (ctype, t2)));
	}
      break;

    case WITH_RECORD_EXPR:
      /* Simplify the value operand; the record operand is carried along
	 unchanged.  */
      if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
	return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
		      TREE_OPERAND (t, 1));
      break;

    case SAVE_EXPR:
      /* If this has not been evaluated and the operand has no side effects,
	 we can see if we can do something inside it and make a new one.
	 Note that this test is overly conservative since we can do this
	 if the only reason it had side effects is that it was another
	 similar SAVE_EXPR, but that isn't worth bothering with.  */
      if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
	  && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
					wide_type)))
	return save_expr (t1);
      break;

    case LSHIFT_EXPR: case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && 0 != (t1 = convert (ctype,
				 const_binop (LSHIFT_EXPR, size_one_node,
					      op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build (tcode == LSHIFT_EXPR
				      ? MULT_EXPR : FLOOR_DIV_EXPR,
				      ctype, convert (ctype, op0), t1),
			       c, code, wide_type);
      break;

    case PLUS_EXPR: case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0)
	return fold (build (tcode, ctype, convert (ctype, t1),
			    convert (ctype, t2)));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == CEIL_MOD_EXPR)
	    code = FLOOR_MOD_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code == FLOOR_MOD_EXPR)
	    code = CEIL_MOD_EXPR;
	  else if (code != MULT_EXPR)
	    break;
	}

      /* Now do the operation and verify it doesn't overflow.  */
      op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
      if (op1 == 0 || TREE_OVERFLOW (op1))
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold (build (tcode, ctype, convert (ctype, t1), op1));

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold (build (tcode, ctype, fold (build (code, ctype,
						       convert (ctype, op0),
						       convert (ctype, c))),
			    op1));

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ... */

    case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, convert (ctype, t1),
			    convert (ctype, op1)));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, convert (ctype, op0),
			    convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
				     convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold (build (tcode, ctype, convert (ctype, op0), t1));

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TREE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE
	       && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold (build (tcode, ctype, convert (ctype, op0),
				convert (ctype,
					 const_binop (TRUNC_DIV_EXPR,
						      op1, c, 0))));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold (build (code, ctype, convert (ctype, op0),
				convert (ctype,
					 const_binop (TRUNC_DIV_EXPR,
						      c, op1, 0))));
	}
      break;

    default:
      break;
    }

  return 0;
}
4569 \f
4570 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4571 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4572 that we may sometimes modify the tree. */
4573
4574 static tree
4575 strip_compound_expr (t, s)
4576 tree t;
4577 tree s;
4578 {
4579 enum tree_code code = TREE_CODE (t);
4580
4581 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4582 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4583 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4584 return TREE_OPERAND (t, 1);
4585
4586 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4587 don't bother handling any other types. */
4588 else if (code == COND_EXPR)
4589 {
4590 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4591 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4592 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4593 }
4594 else if (TREE_CODE_CLASS (code) == '1')
4595 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4596 else if (TREE_CODE_CLASS (code) == '<'
4597 || TREE_CODE_CLASS (code) == '2')
4598 {
4599 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4600 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4601 }
4602
4603 return t;
4604 }
4605 \f
4606 /* Return a node which has the indicated constant VALUE (either 0 or
4607 1), and is of the indicated TYPE. */
4608
4609 static tree
4610 constant_boolean_node (value, type)
4611 int value;
4612 tree type;
4613 {
4614 if (type == integer_type_node)
4615 return value ? integer_one_node : integer_zero_node;
4616 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4617 return truthvalue_conversion (value ? integer_one_node :
4618 integer_zero_node);
4619 else
4620 {
4621 tree t = build_int_2 (value, 0);
4622
4623 TREE_TYPE (t) = type;
4624 return t;
4625 }
4626 }
4627
4628 /* Utility function for the following routine, to see how complex a nesting of
4629 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4630 we don't care (to avoid spending too much time on complex expressions.). */
4631
4632 static int
4633 count_cond (expr, lim)
4634 tree expr;
4635 int lim;
4636 {
4637 int true, false;
4638
4639 if (TREE_CODE (expr) != COND_EXPR)
4640 return 0;
4641 else if (lim <= 0)
4642 return 0;
4643
4644 true = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4645 false = count_cond (TREE_OPERAND (expr, 2), lim - 1 - true);
4646 return MIN (lim, 1 + true + false);
4647 }
4648 \f
4649 /* Perform constant folding and related simplification of EXPR.
4650 The related simplifications include x*1 => x, x*0 => 0, etc.,
4651 and application of the associative law.
4652 NOP_EXPR conversions may be removed freely (as long as we
4653 are careful not to change the C type of the overall expression)
4654 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4655 but we can constant-fold them if they have constant operands. */
4656
4657 tree
4658 fold (expr)
4659 tree expr;
4660 {
4661 register tree t = expr;
4662 tree t1 = NULL_TREE;
4663 tree tem;
4664 tree type = TREE_TYPE (expr);
4665 register tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4666 register enum tree_code code = TREE_CODE (t);
4667 register int kind;
4668 int invert;
4669 /* WINS will be nonzero when the switch is done
4670 if all operands are constant. */
4671 int wins = 1;
4672
4673 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4674 Likewise for a SAVE_EXPR that's already been evaluated. */
4675 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t)) != 0)
4676 return t;
4677
4678 /* Return right away if already constant. */
4679 if (TREE_CONSTANT (t))
4680 {
4681 if (code == CONST_DECL)
4682 return DECL_INITIAL (t);
4683 return t;
4684 }
4685
4686 #ifdef MAX_INTEGER_COMPUTATION_MODE
4687 check_max_integer_computation_mode (expr);
4688 #endif
4689
4690 kind = TREE_CODE_CLASS (code);
4691 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4692 {
4693 tree subop;
4694
4695 /* Special case for conversion ops that can have fixed point args. */
4696 arg0 = TREE_OPERAND (t, 0);
4697
4698 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4699 if (arg0 != 0)
4700 STRIP_SIGN_NOPS (arg0);
4701
4702 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4703 subop = TREE_REALPART (arg0);
4704 else
4705 subop = arg0;
4706
4707 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4708 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4709 && TREE_CODE (subop) != REAL_CST
4710 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4711 )
4712 /* Note that TREE_CONSTANT isn't enough:
4713 static var addresses are constant but we can't
4714 do arithmetic on them. */
4715 wins = 0;
4716 }
4717 else if (kind == 'e' || kind == '<'
4718 || kind == '1' || kind == '2' || kind == 'r')
4719 {
4720 register int len = tree_code_length[(int) code];
4721 register int i;
4722 for (i = 0; i < len; i++)
4723 {
4724 tree op = TREE_OPERAND (t, i);
4725 tree subop;
4726
4727 if (op == 0)
4728 continue; /* Valid for CALL_EXPR, at least. */
4729
4730 if (kind == '<' || code == RSHIFT_EXPR)
4731 {
4732 /* Signedness matters here. Perhaps we can refine this
4733 later. */
4734 STRIP_SIGN_NOPS (op);
4735 }
4736 else
4737 /* Strip any conversions that don't change the mode. */
4738 STRIP_NOPS (op);
4739
4740 if (TREE_CODE (op) == COMPLEX_CST)
4741 subop = TREE_REALPART (op);
4742 else
4743 subop = op;
4744
4745 if (TREE_CODE (subop) != INTEGER_CST
4746 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4747 && TREE_CODE (subop) != REAL_CST
4748 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4749 )
4750 /* Note that TREE_CONSTANT isn't enough:
4751 static var addresses are constant but we can't
4752 do arithmetic on them. */
4753 wins = 0;
4754
4755 if (i == 0)
4756 arg0 = op;
4757 else if (i == 1)
4758 arg1 = op;
4759 }
4760 }
4761
4762 /* If this is a commutative operation, and ARG0 is a constant, move it
4763 to ARG1 to reduce the number of tests below. */
4764 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4765 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4766 || code == BIT_AND_EXPR)
4767 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4768 {
4769 tem = arg0; arg0 = arg1; arg1 = tem;
4770
4771 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4772 TREE_OPERAND (t, 1) = tem;
4773 }
4774
4775 /* Now WINS is set as described above,
4776 ARG0 is the first operand of EXPR,
4777 and ARG1 is the second operand (if it has more than one operand).
4778
4779 First check for cases where an arithmetic operation is applied to a
4780 compound, conditional, or comparison operation. Push the arithmetic
4781 operation inside the compound or conditional to see if any folding
4782 can then be done. Convert comparison to conditional for this purpose.
4783 The also optimizes non-constant cases that used to be done in
4784 expand_expr.
4785
4786 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
4787 one of the operands is a comparison and the other is a comparison, a
4788 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4789 code below would make the expression more complex. Change it to a
4790 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4791 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
4792
4793 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4794 || code == EQ_EXPR || code == NE_EXPR)
4795 && ((truth_value_p (TREE_CODE (arg0))
4796 && (truth_value_p (TREE_CODE (arg1))
4797 || (TREE_CODE (arg1) == BIT_AND_EXPR
4798 && integer_onep (TREE_OPERAND (arg1, 1)))))
4799 || (truth_value_p (TREE_CODE (arg1))
4800 && (truth_value_p (TREE_CODE (arg0))
4801 || (TREE_CODE (arg0) == BIT_AND_EXPR
4802 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4803 {
4804 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4805 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4806 : TRUTH_XOR_EXPR,
4807 type, arg0, arg1));
4808
4809 if (code == EQ_EXPR)
4810 t = invert_truthvalue (t);
4811
4812 return t;
4813 }
4814
4815 if (TREE_CODE_CLASS (code) == '1')
4816 {
4817 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4818 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4819 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4820 else if (TREE_CODE (arg0) == COND_EXPR)
4821 {
4822 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4823 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
4824 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
4825
4826 /* If this was a conversion, and all we did was to move into
4827 inside the COND_EXPR, bring it back out. But leave it if
4828 it is a conversion from integer to integer and the
4829 result precision is no wider than a word since such a
4830 conversion is cheap and may be optimized away by combine,
4831 while it couldn't if it were outside the COND_EXPR. Then return
4832 so we don't get into an infinite recursion loop taking the
4833 conversion out and then back in. */
4834
4835 if ((code == NOP_EXPR || code == CONVERT_EXPR
4836 || code == NON_LVALUE_EXPR)
4837 && TREE_CODE (t) == COND_EXPR
4838 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4839 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4840 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4841 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4842 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4843 && (INTEGRAL_TYPE_P
4844 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4845 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4846 t = build1 (code, type,
4847 build (COND_EXPR,
4848 TREE_TYPE (TREE_OPERAND
4849 (TREE_OPERAND (t, 1), 0)),
4850 TREE_OPERAND (t, 0),
4851 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4852 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4853 return t;
4854 }
4855 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4856 return fold (build (COND_EXPR, type, arg0,
4857 fold (build1 (code, type, integer_one_node)),
4858 fold (build1 (code, type, integer_zero_node))));
4859 }
4860 else if (TREE_CODE_CLASS (code) == '2'
4861 || TREE_CODE_CLASS (code) == '<')
4862 {
4863 if (TREE_CODE (arg1) == COMPOUND_EXPR)
4864 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4865 fold (build (code, type,
4866 arg0, TREE_OPERAND (arg1, 1))));
4867 else if ((TREE_CODE (arg1) == COND_EXPR
4868 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4869 && TREE_CODE_CLASS (code) != '<'))
4870 && (TREE_CODE (arg0) != COND_EXPR
4871 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4872 && (! TREE_SIDE_EFFECTS (arg0)
4873 || (global_bindings_p () == 0
4874 && ! contains_placeholder_p (arg0))))
4875 {
4876 tree test, true_value, false_value;
4877 tree lhs = 0, rhs = 0;
4878
4879 if (TREE_CODE (arg1) == COND_EXPR)
4880 {
4881 test = TREE_OPERAND (arg1, 0);
4882 true_value = TREE_OPERAND (arg1, 1);
4883 false_value = TREE_OPERAND (arg1, 2);
4884 }
4885 else
4886 {
4887 tree testtype = TREE_TYPE (arg1);
4888 test = arg1;
4889 true_value = convert (testtype, integer_one_node);
4890 false_value = convert (testtype, integer_zero_node);
4891 }
4892
4893 /* If ARG0 is complex we want to make sure we only evaluate
4894 it once. Though this is only required if it is volatile, it
4895 might be more efficient even if it is not. However, if we
4896 succeed in folding one part to a constant, we do not need
4897 to make this SAVE_EXPR. Since we do this optimization
4898 primarily to see if we do end up with constant and this
4899 SAVE_EXPR interferes with later optimizations, suppressing
4900 it when we can is important.
4901
4902 If we are not in a function, we can't make a SAVE_EXPR, so don't
4903 try to do so. Don't try to see if the result is a constant
4904 if an arm is a COND_EXPR since we get exponential behavior
4905 in that case. */
4906
4907 if (TREE_CODE (arg0) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4908 && global_bindings_p () == 0
4909 && ((TREE_CODE (arg0) != VAR_DECL
4910 && TREE_CODE (arg0) != PARM_DECL)
4911 || TREE_SIDE_EFFECTS (arg0)))
4912 {
4913 if (TREE_CODE (true_value) != COND_EXPR)
4914 lhs = fold (build (code, type, arg0, true_value));
4915
4916 if (TREE_CODE (false_value) != COND_EXPR)
4917 rhs = fold (build (code, type, arg0, false_value));
4918
4919 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4920 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4921 arg0 = save_expr (arg0), lhs = rhs = 0;
4922 }
4923
4924 if (lhs == 0)
4925 lhs = fold (build (code, type, arg0, true_value));
4926 if (rhs == 0)
4927 rhs = fold (build (code, type, arg0, false_value));
4928
4929 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4930
4931 if (TREE_CODE (arg0) == SAVE_EXPR)
4932 return build (COMPOUND_EXPR, type,
4933 convert (void_type_node, arg0),
4934 strip_compound_expr (test, arg0));
4935 else
4936 return convert (type, test);
4937 }
4938
4939 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4940 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4941 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4942 else if ((TREE_CODE (arg0) == COND_EXPR
4943 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4944 && TREE_CODE_CLASS (code) != '<'))
4945 && (TREE_CODE (arg1) != COND_EXPR
4946 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4947 && (! TREE_SIDE_EFFECTS (arg1)
4948 || (global_bindings_p () == 0
4949 && ! contains_placeholder_p (arg1))))
4950 {
4951 tree test, true_value, false_value;
4952 tree lhs = 0, rhs = 0;
4953
4954 if (TREE_CODE (arg0) == COND_EXPR)
4955 {
4956 test = TREE_OPERAND (arg0, 0);
4957 true_value = TREE_OPERAND (arg0, 1);
4958 false_value = TREE_OPERAND (arg0, 2);
4959 }
4960 else
4961 {
4962 tree testtype = TREE_TYPE (arg0);
4963 test = arg0;
4964 true_value = convert (testtype, integer_one_node);
4965 false_value = convert (testtype, integer_zero_node);
4966 }
4967
4968 if (TREE_CODE (arg1) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4969 && global_bindings_p () == 0
4970 && ((TREE_CODE (arg1) != VAR_DECL
4971 && TREE_CODE (arg1) != PARM_DECL)
4972 || TREE_SIDE_EFFECTS (arg1)))
4973 {
4974 if (TREE_CODE (true_value) != COND_EXPR)
4975 lhs = fold (build (code, type, true_value, arg1));
4976
4977 if (TREE_CODE (false_value) != COND_EXPR)
4978 rhs = fold (build (code, type, false_value, arg1));
4979
4980 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4981 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4982 arg1 = save_expr (arg1), lhs = rhs = 0;
4983 }
4984
4985 if (lhs == 0)
4986 lhs = fold (build (code, type, true_value, arg1));
4987
4988 if (rhs == 0)
4989 rhs = fold (build (code, type, false_value, arg1));
4990
4991 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4992 if (TREE_CODE (arg1) == SAVE_EXPR)
4993 return build (COMPOUND_EXPR, type,
4994 convert (void_type_node, arg1),
4995 strip_compound_expr (test, arg1));
4996 else
4997 return convert (type, test);
4998 }
4999 }
5000 else if (TREE_CODE_CLASS (code) == '<'
5001 && TREE_CODE (arg0) == COMPOUND_EXPR)
5002 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5003 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5004 else if (TREE_CODE_CLASS (code) == '<'
5005 && TREE_CODE (arg1) == COMPOUND_EXPR)
5006 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5007 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5008
5009 switch (code)
5010 {
5011 case INTEGER_CST:
5012 case REAL_CST:
5013 case STRING_CST:
5014 case COMPLEX_CST:
5015 case CONSTRUCTOR:
5016 return t;
5017
5018 case CONST_DECL:
5019 return fold (DECL_INITIAL (t));
5020
5021 case NOP_EXPR:
5022 case FLOAT_EXPR:
5023 case CONVERT_EXPR:
5024 case FIX_TRUNC_EXPR:
5025 /* Other kinds of FIX are not handled properly by fold_convert. */
5026
5027 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5028 return TREE_OPERAND (t, 0);
5029
5030 /* Handle cases of two conversions in a row. */
5031 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5032 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5033 {
5034 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5035 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5036 tree final_type = TREE_TYPE (t);
5037 int inside_int = INTEGRAL_TYPE_P (inside_type);
5038 int inside_ptr = POINTER_TYPE_P (inside_type);
5039 int inside_float = FLOAT_TYPE_P (inside_type);
5040 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5041 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5042 int inter_int = INTEGRAL_TYPE_P (inter_type);
5043 int inter_ptr = POINTER_TYPE_P (inter_type);
5044 int inter_float = FLOAT_TYPE_P (inter_type);
5045 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5046 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5047 int final_int = INTEGRAL_TYPE_P (final_type);
5048 int final_ptr = POINTER_TYPE_P (final_type);
5049 int final_float = FLOAT_TYPE_P (final_type);
5050 unsigned int final_prec = TYPE_PRECISION (final_type);
5051 int final_unsignedp = TREE_UNSIGNED (final_type);
5052
5053 /* In addition to the cases of two conversions in a row
5054 handled below, if we are converting something to its own
5055 type via an object of identical or wider precision, neither
5056 conversion is needed. */
5057 if (inside_type == final_type
5058 && ((inter_int && final_int) || (inter_float && final_float))
5059 && inter_prec >= final_prec)
5060 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
5061
5062 /* Likewise, if the intermediate and final types are either both
5063 float or both integer, we don't need the middle conversion if
5064 it is wider than the final type and doesn't change the signedness
5065 (for integers). Avoid this if the final type is a pointer
5066 since then we sometimes need the inner conversion. Likewise if
5067 the outer has a precision not equal to the size of its mode. */
5068 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5069 || (inter_float && inside_float))
5070 && inter_prec >= inside_prec
5071 && (inter_float || inter_unsignedp == inside_unsignedp)
5072 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5073 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5074 && ! final_ptr)
5075 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5076
5077 /* If we have a sign-extension of a zero-extended value, we can
5078 replace that by a single zero-extension. */
5079 if (inside_int && inter_int && final_int
5080 && inside_prec < inter_prec && inter_prec < final_prec
5081 && inside_unsignedp && !inter_unsignedp)
5082 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5083
5084 /* Two conversions in a row are not needed unless:
5085 - some conversion is floating-point (overstrict for now), or
5086 - the intermediate type is narrower than both initial and
5087 final, or
5088 - the intermediate type and innermost type differ in signedness,
5089 and the outermost type is wider than the intermediate, or
5090 - the initial type is a pointer type and the precisions of the
5091 intermediate and final types differ, or
5092 - the final type is a pointer type and the precisions of the
5093 initial and intermediate types differ. */
5094 if (! inside_float && ! inter_float && ! final_float
5095 && (inter_prec > inside_prec || inter_prec > final_prec)
5096 && ! (inside_int && inter_int
5097 && inter_unsignedp != inside_unsignedp
5098 && inter_prec < final_prec)
5099 && ((inter_unsignedp && inter_prec > inside_prec)
5100 == (final_unsignedp && final_prec > inter_prec))
5101 && ! (inside_ptr && inter_prec != final_prec)
5102 && ! (final_ptr && inside_prec != inter_prec)
5103 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5104 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5105 && ! final_ptr)
5106 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5107 }
5108
5109 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5110 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5111 /* Detect assigning a bitfield. */
5112 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5113 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5114 {
5115 /* Don't leave an assignment inside a conversion
5116 unless assigning a bitfield. */
5117 tree prev = TREE_OPERAND (t, 0);
5118 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5119 /* First do the assignment, then return converted constant. */
5120 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5121 TREE_USED (t) = 1;
5122 return t;
5123 }
5124 if (!wins)
5125 {
5126 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5127 return t;
5128 }
5129 return fold_convert (t, arg0);
5130
5131 #if 0 /* This loses on &"foo"[0]. */
5132 case ARRAY_REF:
5133 {
5134 int i;
5135
5136 /* Fold an expression like: "foo"[2] */
5137 if (TREE_CODE (arg0) == STRING_CST
5138 && TREE_CODE (arg1) == INTEGER_CST
5139 && compare_tree_int (arg1, TREE_STRING_LENGTH (arg0)) < 0)
5140 {
5141 t = build_int_2 (TREE_STRING_POINTER (arg0)[TREE_INT_CST_LOW (arg))], 0);
5142 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
5143 force_fit_type (t, 0);
5144 }
5145 }
5146 return t;
5147 #endif /* 0 */
5148
5149 case COMPONENT_REF:
5150 if (TREE_CODE (arg0) == CONSTRUCTOR)
5151 {
5152 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5153 if (m)
5154 t = TREE_VALUE (m);
5155 }
5156 return t;
5157
5158 case RANGE_EXPR:
5159 TREE_CONSTANT (t) = wins;
5160 return t;
5161
5162 case NEGATE_EXPR:
5163 if (wins)
5164 {
5165 if (TREE_CODE (arg0) == INTEGER_CST)
5166 {
5167 HOST_WIDE_INT low, high;
5168 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5169 TREE_INT_CST_HIGH (arg0),
5170 &low, &high);
5171 t = build_int_2 (low, high);
5172 TREE_TYPE (t) = type;
5173 TREE_OVERFLOW (t)
5174 = (TREE_OVERFLOW (arg0)
5175 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5176 TREE_CONSTANT_OVERFLOW (t)
5177 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5178 }
5179 else if (TREE_CODE (arg0) == REAL_CST)
5180 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5181 }
5182 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5183 return TREE_OPERAND (arg0, 0);
5184
5185 /* Convert - (a - b) to (b - a) for non-floating-point. */
5186 else if (TREE_CODE (arg0) == MINUS_EXPR
5187 && (! FLOAT_TYPE_P (type) || flag_fast_math))
5188 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5189 TREE_OPERAND (arg0, 0));
5190
5191 return t;
5192
5193 case ABS_EXPR:
5194 if (wins)
5195 {
5196 if (TREE_CODE (arg0) == INTEGER_CST)
5197 {
5198 if (! TREE_UNSIGNED (type)
5199 && TREE_INT_CST_HIGH (arg0) < 0)
5200 {
5201 HOST_WIDE_INT low, high;
5202 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5203 TREE_INT_CST_HIGH (arg0),
5204 &low, &high);
5205 t = build_int_2 (low, high);
5206 TREE_TYPE (t) = type;
5207 TREE_OVERFLOW (t)
5208 = (TREE_OVERFLOW (arg0)
5209 | force_fit_type (t, overflow));
5210 TREE_CONSTANT_OVERFLOW (t)
5211 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5212 }
5213 }
5214 else if (TREE_CODE (arg0) == REAL_CST)
5215 {
5216 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5217 t = build_real (type,
5218 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5219 }
5220 }
5221 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
5222 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
5223 return t;
5224
5225 case CONJ_EXPR:
5226 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5227 return convert (type, arg0);
5228 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5229 return build (COMPLEX_EXPR, type,
5230 TREE_OPERAND (arg0, 0),
5231 negate_expr (TREE_OPERAND (arg0, 1)));
5232 else if (TREE_CODE (arg0) == COMPLEX_CST)
5233 return build_complex (type, TREE_OPERAND (arg0, 0),
5234 negate_expr (TREE_OPERAND (arg0, 1)));
5235 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5236 return fold (build (TREE_CODE (arg0), type,
5237 fold (build1 (CONJ_EXPR, type,
5238 TREE_OPERAND (arg0, 0))),
5239 fold (build1 (CONJ_EXPR,
5240 type, TREE_OPERAND (arg0, 1)))));
5241 else if (TREE_CODE (arg0) == CONJ_EXPR)
5242 return TREE_OPERAND (arg0, 0);
5243 return t;
5244
5245 case BIT_NOT_EXPR:
5246 if (wins)
5247 {
5248 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5249 ~ TREE_INT_CST_HIGH (arg0));
5250 TREE_TYPE (t) = type;
5251 force_fit_type (t, 0);
5252 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5253 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5254 }
5255 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5256 return TREE_OPERAND (arg0, 0);
5257 return t;
5258
5259 case PLUS_EXPR:
5260 /* A + (-B) -> A - B */
5261 if (TREE_CODE (arg1) == NEGATE_EXPR)
5262 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5263 /* (-A) + B -> B - A */
5264 if (TREE_CODE (arg0) == NEGATE_EXPR)
5265 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5266 else if (! FLOAT_TYPE_P (type))
5267 {
5268 if (integer_zerop (arg1))
5269 return non_lvalue (convert (type, arg0));
5270
5271 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5272 with a constant, and the two constants have no bits in common,
5273 we should treat this as a BIT_IOR_EXPR since this may produce more
5274 simplifications. */
5275 if (TREE_CODE (arg0) == BIT_AND_EXPR
5276 && TREE_CODE (arg1) == BIT_AND_EXPR
5277 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5278 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5279 && integer_zerop (const_binop (BIT_AND_EXPR,
5280 TREE_OPERAND (arg0, 1),
5281 TREE_OPERAND (arg1, 1), 0)))
5282 {
5283 code = BIT_IOR_EXPR;
5284 goto bit_ior;
5285 }
5286
5287 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5288 (plus (plus (mult) (mult)) (foo)) so that we can
5289 take advantage of the factoring cases below. */
5290 if ((TREE_CODE (arg0) == PLUS_EXPR
5291 && TREE_CODE (arg1) == MULT_EXPR)
5292 || (TREE_CODE (arg1) == PLUS_EXPR
5293 && TREE_CODE (arg0) == MULT_EXPR))
5294 {
5295 tree parg0, parg1, parg, marg;
5296
5297 if (TREE_CODE (arg0) == PLUS_EXPR)
5298 parg = arg0, marg = arg1;
5299 else
5300 parg = arg1, marg = arg0;
5301 parg0 = TREE_OPERAND (parg, 0);
5302 parg1 = TREE_OPERAND (parg, 1);
5303 STRIP_NOPS (parg0);
5304 STRIP_NOPS (parg1);
5305
5306 if (TREE_CODE (parg0) == MULT_EXPR
5307 && TREE_CODE (parg1) != MULT_EXPR)
5308 return fold (build (PLUS_EXPR, type,
5309 fold (build (PLUS_EXPR, type, parg0, marg)),
5310 parg1));
5311 if (TREE_CODE (parg0) != MULT_EXPR
5312 && TREE_CODE (parg1) == MULT_EXPR)
5313 return fold (build (PLUS_EXPR, type,
5314 fold (build (PLUS_EXPR, type, parg1, marg)),
5315 parg0));
5316 }
5317
5318 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5319 {
5320 tree arg00, arg01, arg10, arg11;
5321 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5322
5323 /* (A * C) + (B * C) -> (A+B) * C.
5324 We are most concerned about the case where C is a constant,
5325 but other combinations show up during loop reduction. Since
5326 it is not difficult, try all four possibilities. */
5327
5328 arg00 = TREE_OPERAND (arg0, 0);
5329 arg01 = TREE_OPERAND (arg0, 1);
5330 arg10 = TREE_OPERAND (arg1, 0);
5331 arg11 = TREE_OPERAND (arg1, 1);
5332 same = NULL_TREE;
5333
5334 if (operand_equal_p (arg01, arg11, 0))
5335 same = arg01, alt0 = arg00, alt1 = arg10;
5336 else if (operand_equal_p (arg00, arg10, 0))
5337 same = arg00, alt0 = arg01, alt1 = arg11;
5338 else if (operand_equal_p (arg00, arg11, 0))
5339 same = arg00, alt0 = arg01, alt1 = arg10;
5340 else if (operand_equal_p (arg01, arg10, 0))
5341 same = arg01, alt0 = arg00, alt1 = arg11;
5342
5343 /* No identical multiplicands; see if we can find a common
5344 power-of-two factor in non-power-of-two multiplies. This
5345 can help in multi-dimensional array access. */
5346 else if (TREE_CODE (arg01) == INTEGER_CST
5347 && TREE_CODE (arg11) == INTEGER_CST
5348 && TREE_INT_CST_HIGH (arg01) == 0
5349 && TREE_INT_CST_HIGH (arg11) == 0)
5350 {
5351 HOST_WIDE_INT int01, int11, tmp;
5352 int01 = TREE_INT_CST_LOW (arg01);
5353 int11 = TREE_INT_CST_LOW (arg11);
5354
5355 /* Move min of absolute values to int11. */
5356 if ((int01 >= 0 ? int01 : -int01)
5357 < (int11 >= 0 ? int11 : -int11))
5358 {
5359 tmp = int01, int01 = int11, int11 = tmp;
5360 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5361 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5362 }
5363
5364 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5365 {
5366 alt0 = fold (build (MULT_EXPR, type, arg00,
5367 build_int_2 (int01 / int11, 0)));
5368 alt1 = arg10;
5369 same = arg11;
5370 }
5371 }
5372
5373 if (same)
5374 return fold (build (MULT_EXPR, type,
5375 fold (build (PLUS_EXPR, type, alt0, alt1)),
5376 same));
5377 }
5378 }
5379 /* In IEEE floating point, x+0 may not equal x. */
5380 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5381 || flag_fast_math)
5382 && real_zerop (arg1))
5383 return non_lvalue (convert (type, arg0));
5384 /* x+(-0) equals x, even for IEEE. */
5385 else if (TREE_CODE (arg1) == REAL_CST
5386 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5387 return non_lvalue (convert (type, arg0));
5388
5389 bit_rotate:
5390 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5391 is a rotate of A by C1 bits. */
5392 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5393 is a rotate of A by B bits. */
5394 {
5395 register enum tree_code code0, code1;
5396 code0 = TREE_CODE (arg0);
5397 code1 = TREE_CODE (arg1);
5398 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5399 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5400 && operand_equal_p (TREE_OPERAND (arg0, 0),
5401 TREE_OPERAND (arg1,0), 0)
5402 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5403 {
5404 register tree tree01, tree11;
5405 register enum tree_code code01, code11;
5406
5407 tree01 = TREE_OPERAND (arg0, 1);
5408 tree11 = TREE_OPERAND (arg1, 1);
5409 STRIP_NOPS (tree01);
5410 STRIP_NOPS (tree11);
5411 code01 = TREE_CODE (tree01);
5412 code11 = TREE_CODE (tree11);
5413 if (code01 == INTEGER_CST
5414 && code11 == INTEGER_CST
5415 && TREE_INT_CST_HIGH (tree01) == 0
5416 && TREE_INT_CST_HIGH (tree11) == 0
5417 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5418 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5419 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5420 code0 == LSHIFT_EXPR ? tree01 : tree11);
5421 else if (code11 == MINUS_EXPR)
5422 {
5423 tree tree110, tree111;
5424 tree110 = TREE_OPERAND (tree11, 0);
5425 tree111 = TREE_OPERAND (tree11, 1);
5426 STRIP_NOPS (tree110);
5427 STRIP_NOPS (tree111);
5428 if (TREE_CODE (tree110) == INTEGER_CST
5429 && 0 == compare_tree_int (tree110,
5430 TYPE_PRECISION
5431 (TREE_TYPE (TREE_OPERAND
5432 (arg0, 0))))
5433 && operand_equal_p (tree01, tree111, 0))
5434 return build ((code0 == LSHIFT_EXPR
5435 ? LROTATE_EXPR
5436 : RROTATE_EXPR),
5437 type, TREE_OPERAND (arg0, 0), tree01);
5438 }
5439 else if (code01 == MINUS_EXPR)
5440 {
5441 tree tree010, tree011;
5442 tree010 = TREE_OPERAND (tree01, 0);
5443 tree011 = TREE_OPERAND (tree01, 1);
5444 STRIP_NOPS (tree010);
5445 STRIP_NOPS (tree011);
5446 if (TREE_CODE (tree010) == INTEGER_CST
5447 && 0 == compare_tree_int (tree010,
5448 TYPE_PRECISION
5449 (TREE_TYPE (TREE_OPERAND
5450 (arg0, 0))))
5451 && operand_equal_p (tree11, tree011, 0))
5452 return build ((code0 != LSHIFT_EXPR
5453 ? LROTATE_EXPR
5454 : RROTATE_EXPR),
5455 type, TREE_OPERAND (arg0, 0), tree11);
5456 }
5457 }
5458 }
5459
5460
5461 associate:
5462 /* In most languages, can't associate operations on floats through
5463 parentheses. Rather than remember where the parentheses were, we
5464 don't associate floats at all. It shouldn't matter much. However,
5465 associating multiplications is only very slightly inaccurate, so do
5466 that if -ffast-math is specified. */
5467
5468 if (! wins
5469 && (! FLOAT_TYPE_P (type)
5470 || (flag_fast_math && code != MULT_EXPR)))
5471 {
5472 tree var0, con0, lit0, var1, con1, lit1;
5473
5474 /* Split both trees into variables, constants, and literals. Then
5475 associate each group together, the constants with literals,
5476 then the result with variables. This increases the chances of
5477 literals being recombined later and of generating relocatable
5478 expressions for the sum of a constant and literal. */
5479 var0 = split_tree (arg0, code, &con0, &lit0, 0);
5480 var1 = split_tree (arg1, code, &con1, &lit1, code == MINUS_EXPR);
5481
5482 /* Only do something if we found more than two objects. Otherwise,
5483 nothing has changed and we risk infinite recursion. */
5484 if (2 < ((var0 != 0) + (var1 != 0) + (con0 != 0) + (con1 != 0)
5485 + (lit0 != 0) + (lit1 != 0)))
5486 {
5487 var0 = associate_trees (var0, var1, code, type);
5488 con0 = associate_trees (con0, con1, code, type);
5489 lit0 = associate_trees (lit0, lit1, code, type);
5490 con0 = associate_trees (con0, lit0, code, type);
5491 return convert (type, associate_trees (var0, con0, code, type));
5492 }
5493 }
5494
5495 binary:
5496 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
5497 if (TREE_CODE (arg1) == REAL_CST)
5498 return t;
5499 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
5500 if (wins)
5501 t1 = const_binop (code, arg0, arg1, 0);
5502 if (t1 != NULL_TREE)
5503 {
5504 /* The return value should always have
5505 the same type as the original expression. */
5506 if (TREE_TYPE (t1) != TREE_TYPE (t))
5507 t1 = convert (TREE_TYPE (t), t1);
5508
5509 return t1;
5510 }
5511 return t;
5512
5513 case MINUS_EXPR:
5514 /* A - (-B) -> A + B */
5515 if (TREE_CODE (arg1) == NEGATE_EXPR)
5516 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5517 /* (-A) - CST -> (-CST) - A for floating point (what about ints ?) */
5518 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5519 return
5520 fold (build (MINUS_EXPR, type,
5521 build_real (TREE_TYPE (arg1),
5522 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5523 TREE_OPERAND (arg0, 0)));
5524
5525 if (! FLOAT_TYPE_P (type))
5526 {
5527 if (! wins && integer_zerop (arg0))
5528 return convert (type, negate_expr (arg1));
5529 if (integer_zerop (arg1))
5530 return non_lvalue (convert (type, arg0));
5531
5532 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5533 about the case where C is a constant, just try one of the
5534 four possibilities. */
5535
5536 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5537 && operand_equal_p (TREE_OPERAND (arg0, 1),
5538 TREE_OPERAND (arg1, 1), 0))
5539 return fold (build (MULT_EXPR, type,
5540 fold (build (MINUS_EXPR, type,
5541 TREE_OPERAND (arg0, 0),
5542 TREE_OPERAND (arg1, 0))),
5543 TREE_OPERAND (arg0, 1)));
5544 }
5545
5546 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5547 || flag_fast_math)
5548 {
5549 /* Except with IEEE floating point, 0-x equals -x. */
5550 if (! wins && real_zerop (arg0))
5551 return convert (type, negate_expr (arg1));
5552 /* Except with IEEE floating point, x-0 equals x. */
5553 if (real_zerop (arg1))
5554 return non_lvalue (convert (type, arg0));
5555 }
5556
5557 /* Fold &x - &x. This can happen from &x.foo - &x.
5558 This is unsafe for certain floats even in non-IEEE formats.
5559 In IEEE, it is unsafe because it does wrong for NaNs.
5560 Also note that operand_equal_p is always false if an operand
5561 is volatile. */
5562
5563 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
5564 && operand_equal_p (arg0, arg1, 0))
5565 return convert (type, integer_zero_node);
5566
5567 goto associate;
5568
5569 case MULT_EXPR:
5570 /* (-A) * (-B) -> A * B */
5571 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5572 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5573 TREE_OPERAND (arg1, 0)));
5574
5575 if (! FLOAT_TYPE_P (type))
5576 {
5577 if (integer_zerop (arg1))
5578 return omit_one_operand (type, arg1, arg0);
5579 if (integer_onep (arg1))
5580 return non_lvalue (convert (type, arg0));
5581
5582 /* (a * (1 << b)) is (a << b) */
5583 if (TREE_CODE (arg1) == LSHIFT_EXPR
5584 && integer_onep (TREE_OPERAND (arg1, 0)))
5585 return fold (build (LSHIFT_EXPR, type, arg0,
5586 TREE_OPERAND (arg1, 1)));
5587 if (TREE_CODE (arg0) == LSHIFT_EXPR
5588 && integer_onep (TREE_OPERAND (arg0, 0)))
5589 return fold (build (LSHIFT_EXPR, type, arg1,
5590 TREE_OPERAND (arg0, 1)));
5591
5592 if (TREE_CODE (arg1) == INTEGER_CST
5593 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5594 code, NULL_TREE)))
5595 return convert (type, tem);
5596
5597 }
5598 else
5599 {
5600 /* x*0 is 0, except for IEEE floating point. */
5601 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5602 || flag_fast_math)
5603 && real_zerop (arg1))
5604 return omit_one_operand (type, arg1, arg0);
5605 /* In IEEE floating point, x*1 is not equivalent to x for snans.
5606 However, ANSI says we can drop signals,
5607 so we can do this anyway. */
5608 if (real_onep (arg1))
5609 return non_lvalue (convert (type, arg0));
5610 /* x*2 is x+x */
5611 if (! wins && real_twop (arg1) && global_bindings_p () == 0
5612 && ! contains_placeholder_p (arg0))
5613 {
5614 tree arg = save_expr (arg0);
5615 return build (PLUS_EXPR, type, arg, arg);
5616 }
5617 }
5618 goto associate;
5619
5620 case BIT_IOR_EXPR:
5621 bit_ior:
5622 if (integer_all_onesp (arg1))
5623 return omit_one_operand (type, arg1, arg0);
5624 if (integer_zerop (arg1))
5625 return non_lvalue (convert (type, arg0));
5626 t1 = distribute_bit_expr (code, type, arg0, arg1);
5627 if (t1 != NULL_TREE)
5628 return t1;
5629
5630 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5631
5632 This results in more efficient code for machines without a NAND
5633 instruction. Combine will canonicalize to the first form
5634 which will allow use of NAND instructions provided by the
5635 backend if they exist. */
5636 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5637 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5638 {
5639 return fold (build1 (BIT_NOT_EXPR, type,
5640 build (BIT_AND_EXPR, type,
5641 TREE_OPERAND (arg0, 0),
5642 TREE_OPERAND (arg1, 0))));
5643 }
5644
5645 /* See if this can be simplified into a rotate first. If that
5646 is unsuccessful continue in the association code. */
5647 goto bit_rotate;
5648
5649 case BIT_XOR_EXPR:
5650 if (integer_zerop (arg1))
5651 return non_lvalue (convert (type, arg0));
5652 if (integer_all_onesp (arg1))
5653 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5654
5655 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5656 with a constant, and the two constants have no bits in common,
5657 we should treat this as a BIT_IOR_EXPR since this may produce more
5658 simplifications. */
5659 if (TREE_CODE (arg0) == BIT_AND_EXPR
5660 && TREE_CODE (arg1) == BIT_AND_EXPR
5661 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5662 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5663 && integer_zerop (const_binop (BIT_AND_EXPR,
5664 TREE_OPERAND (arg0, 1),
5665 TREE_OPERAND (arg1, 1), 0)))
5666 {
5667 code = BIT_IOR_EXPR;
5668 goto bit_ior;
5669 }
5670
5671 /* See if this can be simplified into a rotate first. If that
5672 is unsuccessful continue in the association code. */
5673 goto bit_rotate;
5674
5675 case BIT_AND_EXPR:
5676 bit_and:
5677 if (integer_all_onesp (arg1))
5678 return non_lvalue (convert (type, arg0));
5679 if (integer_zerop (arg1))
5680 return omit_one_operand (type, arg1, arg0);
5681 t1 = distribute_bit_expr (code, type, arg0, arg1);
5682 if (t1 != NULL_TREE)
5683 return t1;
5684 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
5685 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
5686 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5687 {
5688 unsigned int prec
5689 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
5690
5691 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5692 && (~TREE_INT_CST_LOW (arg0)
5693 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5694 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
5695 }
5696 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5697 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5698 {
5699 unsigned int prec
5700 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5701
5702 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5703 && (~TREE_INT_CST_LOW (arg1)
5704 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5705 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5706 }
5707
5708 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5709
5710 This results in more efficient code for machines without a NOR
5711 instruction. Combine will canonicalize to the first form
5712 which will allow use of NOR instructions provided by the
5713 backend if they exist. */
5714 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5715 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5716 {
5717 return fold (build1 (BIT_NOT_EXPR, type,
5718 build (BIT_IOR_EXPR, type,
5719 TREE_OPERAND (arg0, 0),
5720 TREE_OPERAND (arg1, 0))));
5721 }
5722
5723 goto associate;
5724
5725 case BIT_ANDTC_EXPR:
5726 if (integer_all_onesp (arg0))
5727 return non_lvalue (convert (type, arg1));
5728 if (integer_zerop (arg0))
5729 return omit_one_operand (type, arg0, arg1);
5730 if (TREE_CODE (arg1) == INTEGER_CST)
5731 {
5732 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5733 code = BIT_AND_EXPR;
5734 goto bit_and;
5735 }
5736 goto binary;
5737
5738 case RDIV_EXPR:
5739 /* In most cases, do nothing with a divide by zero. */
5740 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
5741 #ifndef REAL_INFINITY
5742 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
5743 return t;
5744 #endif
5745 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
5746
5747 /* (-A) / (-B) -> A / B */
5748 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5749 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5750 TREE_OPERAND (arg1, 0)));
5751
5752 /* In IEEE floating point, x/1 is not equivalent to x for snans.
5753 However, ANSI says we can drop signals, so we can do this anyway. */
5754 if (real_onep (arg1))
5755 return non_lvalue (convert (type, arg0));
5756
5757 /* If ARG1 is a constant, we can convert this to a multiply by the
5758 reciprocal. This does not have the same rounding properties,
5759 so only do this if -ffast-math. We can actually always safely
5760 do it if ARG1 is a power of two, but it's hard to tell if it is
5761 or not in a portable manner. */
5762 if (TREE_CODE (arg1) == REAL_CST)
5763 {
5764 if (flag_fast_math
5765 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5766 arg1, 0)))
5767 return fold (build (MULT_EXPR, type, arg0, tem));
5768 /* Find the reciprocal if optimizing and the result is exact. */
5769 else if (optimize)
5770 {
5771 REAL_VALUE_TYPE r;
5772 r = TREE_REAL_CST (arg1);
5773 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
5774 {
5775 tem = build_real (type, r);
5776 return fold (build (MULT_EXPR, type, arg0, tem));
5777 }
5778 }
5779 }
5780 goto binary;
5781
5782 case TRUNC_DIV_EXPR:
5783 case ROUND_DIV_EXPR:
5784 case FLOOR_DIV_EXPR:
5785 case CEIL_DIV_EXPR:
5786 case EXACT_DIV_EXPR:
5787 if (integer_onep (arg1))
5788 return non_lvalue (convert (type, arg0));
5789 if (integer_zerop (arg1))
5790 return t;
5791
5792 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5793 operation, EXACT_DIV_EXPR.
5794
5795 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5796 At one time others generated faster code, it's not clear if they do
5797 after the last round to changes to the DIV code in expmed.c. */
5798 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5799 && multiple_of_p (type, arg0, arg1))
5800 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5801
5802 if (TREE_CODE (arg1) == INTEGER_CST
5803 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5804 code, NULL_TREE)))
5805 return convert (type, tem);
5806
5807 goto binary;
5808
5809 case CEIL_MOD_EXPR:
5810 case FLOOR_MOD_EXPR:
5811 case ROUND_MOD_EXPR:
5812 case TRUNC_MOD_EXPR:
5813 if (integer_onep (arg1))
5814 return omit_one_operand (type, integer_zero_node, arg0);
5815 if (integer_zerop (arg1))
5816 return t;
5817
5818 if (TREE_CODE (arg1) == INTEGER_CST
5819 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5820 code, NULL_TREE)))
5821 return convert (type, tem);
5822
5823 goto binary;
5824
5825 case LSHIFT_EXPR:
5826 case RSHIFT_EXPR:
5827 case LROTATE_EXPR:
5828 case RROTATE_EXPR:
5829 if (integer_zerop (arg1))
5830 return non_lvalue (convert (type, arg0));
5831 /* Since negative shift count is not well-defined,
5832 don't try to compute it in the compiler. */
5833 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5834 return t;
5835 /* Rewrite an LROTATE_EXPR by a constant into an
5836 RROTATE_EXPR by a new constant. */
5837 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5838 {
5839 TREE_SET_CODE (t, RROTATE_EXPR);
5840 code = RROTATE_EXPR;
5841 TREE_OPERAND (t, 1) = arg1
5842 = const_binop
5843 (MINUS_EXPR,
5844 convert (TREE_TYPE (arg1),
5845 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5846 arg1, 0);
5847 if (tree_int_cst_sgn (arg1) < 0)
5848 return t;
5849 }
5850
5851 /* If we have a rotate of a bit operation with the rotate count and
5852 the second operand of the bit operation both constant,
5853 permute the two operations. */
5854 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5855 && (TREE_CODE (arg0) == BIT_AND_EXPR
5856 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5857 || TREE_CODE (arg0) == BIT_IOR_EXPR
5858 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5859 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5860 return fold (build (TREE_CODE (arg0), type,
5861 fold (build (code, type,
5862 TREE_OPERAND (arg0, 0), arg1)),
5863 fold (build (code, type,
5864 TREE_OPERAND (arg0, 1), arg1))));
5865
5866 /* Two consecutive rotates adding up to the width of the mode can
5867 be ignored. */
5868 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5869 && TREE_CODE (arg0) == RROTATE_EXPR
5870 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5871 && TREE_INT_CST_HIGH (arg1) == 0
5872 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5873 && ((TREE_INT_CST_LOW (arg1)
5874 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5875 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5876 return TREE_OPERAND (arg0, 0);
5877
5878 goto binary;
5879
5880 case MIN_EXPR:
5881 if (operand_equal_p (arg0, arg1, 0))
5882 return omit_one_operand (type, arg0, arg1);
5883 if (INTEGRAL_TYPE_P (type)
5884 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5885 return omit_one_operand (type, arg1, arg0);
5886 goto associate;
5887
5888 case MAX_EXPR:
5889 if (operand_equal_p (arg0, arg1, 0))
5890 return omit_one_operand (type, arg0, arg1);
5891 if (INTEGRAL_TYPE_P (type)
5892 && TYPE_MAX_VALUE (type)
5893 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5894 return omit_one_operand (type, arg1, arg0);
5895 goto associate;
5896
5897 case TRUTH_NOT_EXPR:
5898 /* Note that the operand of this must be an int
5899 and its values must be 0 or 1.
5900 ("true" is a fixed value perhaps depending on the language,
5901 but we don't handle values other than 1 correctly yet.) */
5902 tem = invert_truthvalue (arg0);
5903 /* Avoid infinite recursion. */
5904 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5905 return t;
5906 return convert (type, tem);
5907
5908 case TRUTH_ANDIF_EXPR:
5909 /* Note that the operands of this must be ints
5910 and their values must be 0 or 1.
5911 ("true" is a fixed value perhaps depending on the language.) */
5912 /* If first arg is constant zero, return it. */
5913 if (integer_zerop (arg0))
5914 return convert (type, arg0);
5915 case TRUTH_AND_EXPR:
5916 /* If either arg is constant true, drop it. */
5917 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5918 return non_lvalue (convert (type, arg1));
5919 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5920 return non_lvalue (convert (type, arg0));
5921 /* If second arg is constant zero, result is zero, but first arg
5922 must be evaluated. */
5923 if (integer_zerop (arg1))
5924 return omit_one_operand (type, arg1, arg0);
5925 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5926 case will be handled here. */
5927 if (integer_zerop (arg0))
5928 return omit_one_operand (type, arg0, arg1);
5929
5930 truth_andor:
5931 /* We only do these simplifications if we are optimizing. */
5932 if (!optimize)
5933 return t;
5934
5935 /* Check for things like (A || B) && (A || C). We can convert this
5936 to A || (B && C). Note that either operator can be any of the four
5937 truth and/or operations and the transformation will still be
5938 valid. Also note that we only care about order for the
5939 ANDIF and ORIF operators. If B contains side effects, this
5940 might change the truth-value of A. */
5941 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5942 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5943 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5944 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5945 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5946 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5947 {
5948 tree a00 = TREE_OPERAND (arg0, 0);
5949 tree a01 = TREE_OPERAND (arg0, 1);
5950 tree a10 = TREE_OPERAND (arg1, 0);
5951 tree a11 = TREE_OPERAND (arg1, 1);
5952 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5953 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5954 && (code == TRUTH_AND_EXPR
5955 || code == TRUTH_OR_EXPR));
5956
5957 if (operand_equal_p (a00, a10, 0))
5958 return fold (build (TREE_CODE (arg0), type, a00,
5959 fold (build (code, type, a01, a11))));
5960 else if (commutative && operand_equal_p (a00, a11, 0))
5961 return fold (build (TREE_CODE (arg0), type, a00,
5962 fold (build (code, type, a01, a10))));
5963 else if (commutative && operand_equal_p (a01, a10, 0))
5964 return fold (build (TREE_CODE (arg0), type, a01,
5965 fold (build (code, type, a00, a11))));
5966
5967                 /* This case is tricky because we must either have commutative
5968 operators or else A10 must not have side-effects. */
5969
5970 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5971 && operand_equal_p (a01, a11, 0))
5972 return fold (build (TREE_CODE (arg0), type,
5973 fold (build (code, type, a00, a10)),
5974 a01));
5975 }
5976
5977 /* See if we can build a range comparison. */
5978 if (0 != (tem = fold_range_test (t)))
5979 return tem;
5980
5981 /* Check for the possibility of merging component references. If our
5982 lhs is another similar operation, try to merge its rhs with our
5983 rhs. Then try to merge our lhs and rhs. */
5984 if (TREE_CODE (arg0) == code
5985 && 0 != (tem = fold_truthop (code, type,
5986 TREE_OPERAND (arg0, 1), arg1)))
5987 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5988
5989 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5990 return tem;
5991
5992 return t;
5993
5994 case TRUTH_ORIF_EXPR:
5995 /* Note that the operands of this must be ints
5996 and their values must be 0 or true.
5997 ("true" is a fixed value perhaps depending on the language.) */
5998 /* If first arg is constant true, return it. */
5999 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6000 return convert (type, arg0);
6001 case TRUTH_OR_EXPR:
6002 /* If either arg is constant zero, drop it. */
6003 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6004 return non_lvalue (convert (type, arg1));
6005 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
6006 return non_lvalue (convert (type, arg0));
6007 /* If second arg is constant true, result is true, but we must
6008 evaluate first arg. */
6009 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6010 return omit_one_operand (type, arg1, arg0);
6011 /* Likewise for first arg, but note this only occurs here for
6012 TRUTH_OR_EXPR. */
6013 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6014 return omit_one_operand (type, arg0, arg1);
6015 goto truth_andor;
6016
6017 case TRUTH_XOR_EXPR:
6018 /* If either arg is constant zero, drop it. */
6019 if (integer_zerop (arg0))
6020 return non_lvalue (convert (type, arg1));
6021 if (integer_zerop (arg1))
6022 return non_lvalue (convert (type, arg0));
6023 /* If either arg is constant true, this is a logical inversion. */
6024 if (integer_onep (arg0))
6025 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6026 if (integer_onep (arg1))
6027 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6028 return t;
6029
6030 case EQ_EXPR:
6031 case NE_EXPR:
6032 case LT_EXPR:
6033 case GT_EXPR:
6034 case LE_EXPR:
6035 case GE_EXPR:
6036 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6037 {
6038 /* (-a) CMP (-b) -> b CMP a */
6039 if (TREE_CODE (arg0) == NEGATE_EXPR
6040 && TREE_CODE (arg1) == NEGATE_EXPR)
6041 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6042 TREE_OPERAND (arg0, 0)));
6043 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6044 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
6045 return
6046 fold (build
6047 (swap_tree_comparison (code), type,
6048 TREE_OPERAND (arg0, 0),
6049 build_real (TREE_TYPE (arg1),
6050 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
6051 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6052 /* a CMP (-0) -> a CMP 0 */
6053 if (TREE_CODE (arg1) == REAL_CST
6054 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
6055 return fold (build (code, type, arg0,
6056 build_real (TREE_TYPE (arg1), dconst0)));
6057 }
6058
6059
6060 /* If one arg is a constant integer, put it last. */
6061 if (TREE_CODE (arg0) == INTEGER_CST
6062 && TREE_CODE (arg1) != INTEGER_CST)
6063 {
6064 TREE_OPERAND (t, 0) = arg1;
6065 TREE_OPERAND (t, 1) = arg0;
6066 arg0 = TREE_OPERAND (t, 0);
6067 arg1 = TREE_OPERAND (t, 1);
6068 code = swap_tree_comparison (code);
6069 TREE_SET_CODE (t, code);
6070 }
6071
6072 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6073 First, see if one arg is constant; find the constant arg
6074 and the other one. */
6075 {
6076 tree constop = 0, varop = NULL_TREE;
6077 int constopnum = -1;
6078
6079 if (TREE_CONSTANT (arg1))
6080 constopnum = 1, constop = arg1, varop = arg0;
6081 if (TREE_CONSTANT (arg0))
6082 constopnum = 0, constop = arg0, varop = arg1;
6083
6084 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6085 {
6086 /* This optimization is invalid for ordered comparisons
6087 if CONST+INCR overflows or if foo+incr might overflow.
6088 This optimization is invalid for floating point due to rounding.
6089 For pointer types we assume overflow doesn't happen. */
6090 if (POINTER_TYPE_P (TREE_TYPE (varop))
6091 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6092 && (code == EQ_EXPR || code == NE_EXPR)))
6093 {
6094 tree newconst
6095 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6096 constop, TREE_OPERAND (varop, 1)));
6097 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
6098
6099 /* If VAROP is a reference to a bitfield, we must mask
6100 the constant by the width of the field. */
6101 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6102 && DECL_BIT_FIELD(TREE_OPERAND
6103 (TREE_OPERAND (varop, 0), 1)))
6104 {
6105 int size
6106 = TREE_INT_CST_LOW (DECL_SIZE
6107 (TREE_OPERAND
6108 (TREE_OPERAND (varop, 0), 1)));
6109 tree mask, unsigned_type;
6110 unsigned int precision;
6111 tree folded_compare;
6112
6113 /* First check whether the comparison would come out
6114 always the same. If we don't do that we would
6115 change the meaning with the masking. */
6116 if (constopnum == 0)
6117 folded_compare = fold (build (code, type, constop,
6118 TREE_OPERAND (varop, 0)));
6119 else
6120 folded_compare = fold (build (code, type,
6121 TREE_OPERAND (varop, 0),
6122 constop));
6123 if (integer_zerop (folded_compare)
6124 || integer_onep (folded_compare))
6125 return omit_one_operand (type, folded_compare, varop);
6126
6127 unsigned_type = type_for_size (size, 1);
6128 precision = TYPE_PRECISION (unsigned_type);
6129 mask = build_int_2 (~0, ~0);
6130 TREE_TYPE (mask) = unsigned_type;
6131 force_fit_type (mask, 0);
6132 mask = const_binop (RSHIFT_EXPR, mask,
6133 size_int (precision - size), 0);
6134 newconst = fold (build (BIT_AND_EXPR,
6135 TREE_TYPE (varop), newconst,
6136 convert (TREE_TYPE (varop),
6137 mask)));
6138 }
6139
6140
6141 t = build (code, type, TREE_OPERAND (t, 0),
6142 TREE_OPERAND (t, 1));
6143 TREE_OPERAND (t, constopnum) = newconst;
6144 return t;
6145 }
6146 }
6147 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6148 {
6149 if (POINTER_TYPE_P (TREE_TYPE (varop))
6150 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6151 && (code == EQ_EXPR || code == NE_EXPR)))
6152 {
6153 tree newconst
6154 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6155 constop, TREE_OPERAND (varop, 1)));
6156 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
6157
6158 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6159 && DECL_BIT_FIELD(TREE_OPERAND
6160 (TREE_OPERAND (varop, 0), 1)))
6161 {
6162 int size
6163 = TREE_INT_CST_LOW (DECL_SIZE
6164 (TREE_OPERAND
6165 (TREE_OPERAND (varop, 0), 1)));
6166 tree mask, unsigned_type;
6167 unsigned int precision;
6168 tree folded_compare;
6169
6170 if (constopnum == 0)
6171 folded_compare = fold (build (code, type, constop,
6172 TREE_OPERAND (varop, 0)));
6173 else
6174 folded_compare = fold (build (code, type,
6175 TREE_OPERAND (varop, 0),
6176 constop));
6177 if (integer_zerop (folded_compare)
6178 || integer_onep (folded_compare))
6179 return omit_one_operand (type, folded_compare, varop);
6180
6181 unsigned_type = type_for_size (size, 1);
6182 precision = TYPE_PRECISION (unsigned_type);
6183 mask = build_int_2 (~0, ~0);
6184 TREE_TYPE (mask) = TREE_TYPE (varop);
6185 force_fit_type (mask, 0);
6186 mask = const_binop (RSHIFT_EXPR, mask,
6187 size_int (precision - size), 0);
6188 newconst = fold (build (BIT_AND_EXPR,
6189 TREE_TYPE (varop), newconst,
6190 convert (TREE_TYPE (varop),
6191 mask)));
6192 }
6193
6194
6195 t = build (code, type, TREE_OPERAND (t, 0),
6196 TREE_OPERAND (t, 1));
6197 TREE_OPERAND (t, constopnum) = newconst;
6198 return t;
6199 }
6200 }
6201 }
6202
6203 /* Change X >= CST to X > (CST - 1) if CST is positive. */
6204 if (TREE_CODE (arg1) == INTEGER_CST
6205 && TREE_CODE (arg0) != INTEGER_CST
6206 && tree_int_cst_sgn (arg1) > 0)
6207 {
6208 switch (TREE_CODE (t))
6209 {
6210 case GE_EXPR:
6211 code = GT_EXPR;
6212 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6213 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6214 break;
6215
6216 case LT_EXPR:
6217 code = LE_EXPR;
6218 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6219 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6220 break;
6221
6222 default:
6223 break;
6224 }
6225 }
6226
6227 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6228 a MINUS_EXPR of a constant, we can convert it into a comparison with
6229 a revised constant as long as no overflow occurs. */
6230 if ((code == EQ_EXPR || code == NE_EXPR)
6231 && TREE_CODE (arg1) == INTEGER_CST
6232 && (TREE_CODE (arg0) == PLUS_EXPR
6233 || TREE_CODE (arg0) == MINUS_EXPR)
6234 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6235 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6236 ? MINUS_EXPR : PLUS_EXPR,
6237 arg1, TREE_OPERAND (arg0, 1), 0))
6238 && ! TREE_CONSTANT_OVERFLOW (tem))
6239 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6240
6241 /* Similarly for a NEGATE_EXPR. */
6242 else if ((code == EQ_EXPR || code == NE_EXPR)
6243 && TREE_CODE (arg0) == NEGATE_EXPR
6244 && TREE_CODE (arg1) == INTEGER_CST
6245 && 0 != (tem = negate_expr (arg1))
6246 && TREE_CODE (tem) == INTEGER_CST
6247 && ! TREE_CONSTANT_OVERFLOW (tem))
6248 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6249
6250 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6251 for !=. Don't do this for ordered comparisons due to overflow. */
6252 else if ((code == NE_EXPR || code == EQ_EXPR)
6253 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6254 return fold (build (code, type,
6255 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6256
6257 /* If we are widening one operand of an integer comparison,
6258 see if the other operand is similarly being widened. Perhaps we
6259 can do the comparison in the narrower type. */
6260 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6261 && TREE_CODE (arg0) == NOP_EXPR
6262 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6263 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6264 && (TREE_TYPE (t1) == TREE_TYPE (tem)
6265 || (TREE_CODE (t1) == INTEGER_CST
6266 && int_fits_type_p (t1, TREE_TYPE (tem)))))
6267 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6268
6269 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6270 constant, we can simplify it. */
6271 else if (TREE_CODE (arg1) == INTEGER_CST
6272 && (TREE_CODE (arg0) == MIN_EXPR
6273 || TREE_CODE (arg0) == MAX_EXPR)
6274 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6275 return optimize_minmax_comparison (t);
6276
6277 /* If we are comparing an ABS_EXPR with a constant, we can
6278 convert all the cases into explicit comparisons, but they may
6279 well not be faster than doing the ABS and one comparison.
6280 But ABS (X) <= C is a range comparison, which becomes a subtraction
6281 and a comparison, and is probably faster. */
6282 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6283 && TREE_CODE (arg0) == ABS_EXPR
6284 && ! TREE_SIDE_EFFECTS (arg0)
6285 && (0 != (tem = negate_expr (arg1)))
6286 && TREE_CODE (tem) == INTEGER_CST
6287 && ! TREE_CONSTANT_OVERFLOW (tem))
6288 return fold (build (TRUTH_ANDIF_EXPR, type,
6289 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6290 build (LE_EXPR, type,
6291 TREE_OPERAND (arg0, 0), arg1)));
6292
6293 /* If this is an EQ or NE comparison with zero and ARG0 is
6294 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6295 two operations, but the latter can be done in one less insn
6296 on machines that have only two-operand insns or on which a
6297 constant cannot be the first operand. */
6298 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6299 && TREE_CODE (arg0) == BIT_AND_EXPR)
6300 {
6301 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6302 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6303 return
6304 fold (build (code, type,
6305 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6306 build (RSHIFT_EXPR,
6307 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6308 TREE_OPERAND (arg0, 1),
6309 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6310 convert (TREE_TYPE (arg0),
6311 integer_one_node)),
6312 arg1));
6313 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6314 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6315 return
6316 fold (build (code, type,
6317 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6318 build (RSHIFT_EXPR,
6319 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6320 TREE_OPERAND (arg0, 0),
6321 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6322 convert (TREE_TYPE (arg0),
6323 integer_one_node)),
6324 arg1));
6325 }
6326
6327 /* If this is an NE or EQ comparison of zero against the result of a
6328 signed MOD operation whose second operand is a power of 2, make
6329 the MOD operation unsigned since it is simpler and equivalent. */
6330 if ((code == NE_EXPR || code == EQ_EXPR)
6331 && integer_zerop (arg1)
6332 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6333 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6334 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6335 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6336 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6337 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6338 {
6339 tree newtype = unsigned_type (TREE_TYPE (arg0));
6340 tree newmod = build (TREE_CODE (arg0), newtype,
6341 convert (newtype, TREE_OPERAND (arg0, 0)),
6342 convert (newtype, TREE_OPERAND (arg0, 1)));
6343
6344 return build (code, type, newmod, convert (newtype, arg1));
6345 }
6346
6347 /* If this is an NE comparison of zero with an AND of one, remove the
6348 comparison since the AND will give the correct value. */
6349 if (code == NE_EXPR && integer_zerop (arg1)
6350 && TREE_CODE (arg0) == BIT_AND_EXPR
6351 && integer_onep (TREE_OPERAND (arg0, 1)))
6352 return convert (type, arg0);
6353
6354 /* If we have (A & C) == C where C is a power of 2, convert this into
6355 (A & C) != 0. Similarly for NE_EXPR. */
6356 if ((code == EQ_EXPR || code == NE_EXPR)
6357 && TREE_CODE (arg0) == BIT_AND_EXPR
6358 && integer_pow2p (TREE_OPERAND (arg0, 1))
6359 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6360 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6361 arg0, integer_zero_node);
6362
6363 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6364 and similarly for >= into !=. */
6365 if ((code == LT_EXPR || code == GE_EXPR)
6366 && TREE_UNSIGNED (TREE_TYPE (arg0))
6367 && TREE_CODE (arg1) == LSHIFT_EXPR
6368 && integer_onep (TREE_OPERAND (arg1, 0)))
6369 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6370 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6371 TREE_OPERAND (arg1, 1)),
6372 convert (TREE_TYPE (arg0), integer_zero_node));
6373
6374 else if ((code == LT_EXPR || code == GE_EXPR)
6375 && TREE_UNSIGNED (TREE_TYPE (arg0))
6376 && (TREE_CODE (arg1) == NOP_EXPR
6377 || TREE_CODE (arg1) == CONVERT_EXPR)
6378 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6379 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6380 return
6381 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6382 convert (TREE_TYPE (arg0),
6383 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6384 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6385 convert (TREE_TYPE (arg0), integer_zero_node));
6386
6387 /* Simplify comparison of something with itself. (For IEEE
6388 floating-point, we can only do some of these simplifications.) */
6389 if (operand_equal_p (arg0, arg1, 0))
6390 {
6391 switch (code)
6392 {
6393 case EQ_EXPR:
6394 case GE_EXPR:
6395 case LE_EXPR:
6396 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6397 return constant_boolean_node (1, type);
6398 code = EQ_EXPR;
6399 TREE_SET_CODE (t, code);
6400 break;
6401
6402 case NE_EXPR:
6403 /* For NE, we can only do this simplification if integer. */
6404 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6405 break;
6406 /* ... fall through ... */
6407 case GT_EXPR:
6408 case LT_EXPR:
6409 return constant_boolean_node (0, type);
6410 default:
6411 abort ();
6412 }
6413 }
6414
6415 /* An unsigned comparison against 0 can be simplified. */
6416 if (integer_zerop (arg1)
6417 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6418 || POINTER_TYPE_P (TREE_TYPE (arg1)))
6419 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6420 {
6421 switch (TREE_CODE (t))
6422 {
6423 case GT_EXPR:
6424 code = NE_EXPR;
6425 TREE_SET_CODE (t, NE_EXPR);
6426 break;
6427 case LE_EXPR:
6428 code = EQ_EXPR;
6429 TREE_SET_CODE (t, EQ_EXPR);
6430 break;
6431 case GE_EXPR:
6432 return omit_one_operand (type,
6433 convert (type, integer_one_node),
6434 arg0);
6435 case LT_EXPR:
6436 return omit_one_operand (type,
6437 convert (type, integer_zero_node),
6438 arg0);
6439 default:
6440 break;
6441 }
6442 }
6443
6444 /* Comparisons with the highest or lowest possible integer of
6445 the specified size will have known values and an unsigned
6446 <= 0x7fffffff can be simplified. */
6447 {
6448 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6449
6450 if (TREE_CODE (arg1) == INTEGER_CST
6451 && ! TREE_CONSTANT_OVERFLOW (arg1)
6452 && width <= HOST_BITS_PER_WIDE_INT
6453 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6454 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6455 {
6456 if (TREE_INT_CST_HIGH (arg1) == 0
6457 && (TREE_INT_CST_LOW (arg1)
6458 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6459 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6460 switch (TREE_CODE (t))
6461 {
6462 case GT_EXPR:
6463 return omit_one_operand (type,
6464 convert (type, integer_zero_node),
6465 arg0);
6466 case GE_EXPR:
6467 TREE_SET_CODE (t, EQ_EXPR);
6468 break;
6469
6470 case LE_EXPR:
6471 return omit_one_operand (type,
6472 convert (type, integer_one_node),
6473 arg0);
6474 case LT_EXPR:
6475 TREE_SET_CODE (t, NE_EXPR);
6476 break;
6477
6478 default:
6479 break;
6480 }
6481
6482 else if (TREE_INT_CST_HIGH (arg1) == -1
6483 && (- TREE_INT_CST_LOW (arg1)
6484 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
6485 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6486 switch (TREE_CODE (t))
6487 {
6488 case LT_EXPR:
6489 return omit_one_operand (type,
6490 convert (type, integer_zero_node),
6491 arg0);
6492 case LE_EXPR:
6493 TREE_SET_CODE (t, EQ_EXPR);
6494 break;
6495
6496 case GE_EXPR:
6497 return omit_one_operand (type,
6498 convert (type, integer_one_node),
6499 arg0);
6500 case GT_EXPR:
6501 TREE_SET_CODE (t, NE_EXPR);
6502 break;
6503
6504 default:
6505 break;
6506 }
6507
6508 else if (TREE_INT_CST_HIGH (arg1) == 0
6509 && (TREE_INT_CST_LOW (arg1)
6510 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6511 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6512
6513 switch (TREE_CODE (t))
6514 {
6515 case LE_EXPR:
6516 return fold (build (GE_EXPR, type,
6517 convert (signed_type (TREE_TYPE (arg0)),
6518 arg0),
6519 convert (signed_type (TREE_TYPE (arg1)),
6520 integer_zero_node)));
6521 case GT_EXPR:
6522 return fold (build (LT_EXPR, type,
6523 convert (signed_type (TREE_TYPE (arg0)),
6524 arg0),
6525 convert (signed_type (TREE_TYPE (arg1)),
6526 integer_zero_node)));
6527
6528 default:
6529 break;
6530 }
6531 }
6532 }
6533
6534 /* If we are comparing an expression that just has comparisons
6535 of two integer values, arithmetic expressions of those comparisons,
6536 and constants, we can simplify it. There are only three cases
6537 to check: the two values can either be equal, the first can be
6538 greater, or the second can be greater. Fold the expression for
6539 those three values. Since each value must be 0 or 1, we have
6540 eight possibilities, each of which corresponds to the constant 0
6541 or 1 or one of the six possible comparisons.
6542
6543 This handles common cases like (a > b) == 0 but also handles
6544 expressions like ((x > y) - (y > x)) > 0, which supposedly
6545 occur in macroized code. */
6546
6547 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6548 {
6549 tree cval1 = 0, cval2 = 0;
6550 int save_p = 0;
6551
6552 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6553 /* Don't handle degenerate cases here; they should already
6554 have been handled anyway. */
6555 && cval1 != 0 && cval2 != 0
6556 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6557 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6558 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6559 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6560 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6561 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6562 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6563 {
6564 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6565 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6566
6567 /* We can't just pass T to eval_subst in case cval1 or cval2
6568 was the same as ARG1. */
6569
6570 tree high_result
6571 = fold (build (code, type,
6572 eval_subst (arg0, cval1, maxval, cval2, minval),
6573 arg1));
6574 tree equal_result
6575 = fold (build (code, type,
6576 eval_subst (arg0, cval1, maxval, cval2, maxval),
6577 arg1));
6578 tree low_result
6579 = fold (build (code, type,
6580 eval_subst (arg0, cval1, minval, cval2, maxval),
6581 arg1));
6582
6583 /* All three of these results should be 0 or 1. Confirm they
6584 are. Then use those values to select the proper code
6585 to use. */
6586
6587 if ((integer_zerop (high_result)
6588 || integer_onep (high_result))
6589 && (integer_zerop (equal_result)
6590 || integer_onep (equal_result))
6591 && (integer_zerop (low_result)
6592 || integer_onep (low_result)))
6593 {
6594 /* Make a 3-bit mask with the high-order bit being the
6595 value for `>', the next for '=', and the low for '<'. */
6596 switch ((integer_onep (high_result) * 4)
6597 + (integer_onep (equal_result) * 2)
6598 + integer_onep (low_result))
6599 {
6600 case 0:
6601 /* Always false. */
6602 return omit_one_operand (type, integer_zero_node, arg0);
6603 case 1:
6604 code = LT_EXPR;
6605 break;
6606 case 2:
6607 code = EQ_EXPR;
6608 break;
6609 case 3:
6610 code = LE_EXPR;
6611 break;
6612 case 4:
6613 code = GT_EXPR;
6614 break;
6615 case 5:
6616 code = NE_EXPR;
6617 break;
6618 case 6:
6619 code = GE_EXPR;
6620 break;
6621 case 7:
6622 /* Always true. */
6623 return omit_one_operand (type, integer_one_node, arg0);
6624 }
6625
6626 t = build (code, type, cval1, cval2);
6627 if (save_p)
6628 return save_expr (t);
6629 else
6630 return fold (t);
6631 }
6632 }
6633 }
6634
6635 /* If this is a comparison of a field, we may be able to simplify it. */
6636 if ((TREE_CODE (arg0) == COMPONENT_REF
6637 || TREE_CODE (arg0) == BIT_FIELD_REF)
6638 && (code == EQ_EXPR || code == NE_EXPR)
6639 /* Handle the constant case even without -O
6640 to make sure the warnings are given. */
6641 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6642 {
6643 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6644 return t1 ? t1 : t;
6645 }
6646
6647 /* If this is a comparison of complex values and either or both sides
6648 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6649 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6650 This may prevent needless evaluations. */
6651 if ((code == EQ_EXPR || code == NE_EXPR)
6652 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6653 && (TREE_CODE (arg0) == COMPLEX_EXPR
6654 || TREE_CODE (arg1) == COMPLEX_EXPR
6655 || TREE_CODE (arg0) == COMPLEX_CST
6656 || TREE_CODE (arg1) == COMPLEX_CST))
6657 {
6658 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6659 tree real0, imag0, real1, imag1;
6660
6661 arg0 = save_expr (arg0);
6662 arg1 = save_expr (arg1);
6663 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6664 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6665 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6666 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6667
6668 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6669 : TRUTH_ORIF_EXPR),
6670 type,
6671 fold (build (code, type, real0, real1)),
6672 fold (build (code, type, imag0, imag1))));
6673 }
6674
6675 /* From here on, the only cases we handle are when the result is
6676 known to be a constant.
6677
6678 To compute GT, swap the arguments and do LT.
6679 To compute GE, do LT and invert the result.
6680 To compute LE, swap the arguments, do LT and invert the result.
6681 To compute NE, do EQ and invert the result.
6682
6683 Therefore, the code below must handle only EQ and LT. */
6684
6685 if (code == LE_EXPR || code == GT_EXPR)
6686 {
6687 tem = arg0, arg0 = arg1, arg1 = tem;
6688 code = swap_tree_comparison (code);
6689 }
6690
6691 /* Note that it is safe to invert for real values here because we
6692 will check below in the one case that it matters. */
6693
6694 t1 = NULL_TREE;
6695 invert = 0;
6696 if (code == NE_EXPR || code == GE_EXPR)
6697 {
6698 invert = 1;
6699 code = invert_tree_comparison (code);
6700 }
6701
6702 /* Compute a result for LT or EQ if args permit;
6703 otherwise return T. */
6704 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6705 {
6706 if (code == EQ_EXPR)
6707 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6708 else
6709 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6710 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6711 : INT_CST_LT (arg0, arg1)),
6712 0);
6713 }
6714
6715 #if 0 /* This is no longer useful, but breaks some real code. */
6716 /* Assume a nonexplicit constant cannot equal an explicit one,
6717 since such code would be undefined anyway.
6718 Exception: on sysvr4, using #pragma weak,
6719 a label can come out as 0. */
6720 else if (TREE_CODE (arg1) == INTEGER_CST
6721 && !integer_zerop (arg1)
6722 && TREE_CONSTANT (arg0)
6723 && TREE_CODE (arg0) == ADDR_EXPR
6724 && code == EQ_EXPR)
6725 t1 = build_int_2 (0, 0);
6726 #endif
6727 /* Two real constants can be compared explicitly. */
6728 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6729 {
6730 /* If either operand is a NaN, the result is false with two
6731 exceptions: First, an NE_EXPR is true on NaNs, but that case
6732 is already handled correctly since we will be inverting the
6733 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6734 or a GE_EXPR into a LT_EXPR, we must return true so that it
6735 will be inverted into false. */
6736
6737 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6738 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6739 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6740
6741 else if (code == EQ_EXPR)
6742 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6743 TREE_REAL_CST (arg1)),
6744 0);
6745 else
6746 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6747 TREE_REAL_CST (arg1)),
6748 0);
6749 }
6750
6751 if (t1 == NULL_TREE)
6752 return t;
6753
6754 if (invert)
6755 TREE_INT_CST_LOW (t1) ^= 1;
6756
6757 TREE_TYPE (t1) = type;
6758 if (TREE_CODE (type) == BOOLEAN_TYPE)
6759 return truthvalue_conversion (t1);
6760 return t1;
6761
6762 case COND_EXPR:
6763 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6764 so all simple results must be passed through pedantic_non_lvalue. */
6765 if (TREE_CODE (arg0) == INTEGER_CST)
6766 return pedantic_non_lvalue
6767 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6768 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6769 return pedantic_omit_one_operand (type, arg1, arg0);
6770
6771 /* If the second operand is zero, invert the comparison and swap
6772 the second and third operands. Likewise if the second operand
6773 is constant and the third is not or if the third operand is
6774 equivalent to the first operand of the comparison. */
6775
6776 if (integer_zerop (arg1)
6777 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6778 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6779 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6780 TREE_OPERAND (t, 2),
6781 TREE_OPERAND (arg0, 1))))
6782 {
6783 /* See if this can be inverted. If it can't, possibly because
6784 it was a floating-point inequality comparison, don't do
6785 anything. */
6786 tem = invert_truthvalue (arg0);
6787
6788 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6789 {
6790 t = build (code, type, tem,
6791 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6792 arg0 = tem;
6793 /* arg1 should be the first argument of the new T. */
6794 arg1 = TREE_OPERAND (t, 1);
6795 STRIP_NOPS (arg1);
6796 }
6797 }
6798
6799 /* If we have A op B ? A : C, we may be able to convert this to a
6800 simpler expression, depending on the operation and the values
6801 of B and C. IEEE floating point prevents this though,
6802 because A or B might be -0.0 or a NaN. */
6803
6804 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6805 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
6806 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
6807 || flag_fast_math)
6808 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6809 arg1, TREE_OPERAND (arg0, 1)))
6810 {
6811 tree arg2 = TREE_OPERAND (t, 2);
6812 enum tree_code comp_code = TREE_CODE (arg0);
6813
6814 STRIP_NOPS (arg2);
6815
6816 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
6817 depending on the comparison operation. */
6818 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6819 ? real_zerop (TREE_OPERAND (arg0, 1))
6820 : integer_zerop (TREE_OPERAND (arg0, 1)))
6821 && TREE_CODE (arg2) == NEGATE_EXPR
6822 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6823 switch (comp_code)
6824 {
6825 case EQ_EXPR:
6826 return
6827 pedantic_non_lvalue (convert (type, negate_expr (arg1)));
6828 case NE_EXPR:
6829 return pedantic_non_lvalue (convert (type, arg1));
6830 case GE_EXPR:
6831 case GT_EXPR:
6832 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6833 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6834 return pedantic_non_lvalue
6835 (convert (type, fold (build1 (ABS_EXPR,
6836 TREE_TYPE (arg1), arg1))));
6837 case LE_EXPR:
6838 case LT_EXPR:
6839 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6840 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6841 return pedantic_non_lvalue
6842 (negate_expr (convert (type,
6843 fold (build1 (ABS_EXPR,
6844 TREE_TYPE (arg1),
6845 arg1)))));
6846 default:
6847 abort ();
6848 }
6849
6850 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
6851 always zero. */
6852
6853 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6854 {
6855 if (comp_code == NE_EXPR)
6856 return pedantic_non_lvalue (convert (type, arg1));
6857 else if (comp_code == EQ_EXPR)
6858 return pedantic_non_lvalue (convert (type, integer_zero_node));
6859 }
6860
6861 /* If this is A op B ? A : B, this is either A, B, min (A, B),
6862 or max (A, B), depending on the operation. */
6863
6864 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6865 arg2, TREE_OPERAND (arg0, 0)))
6866 {
6867 tree comp_op0 = TREE_OPERAND (arg0, 0);
6868 tree comp_op1 = TREE_OPERAND (arg0, 1);
6869 tree comp_type = TREE_TYPE (comp_op0);
6870
6871 switch (comp_code)
6872 {
6873 case EQ_EXPR:
6874 return pedantic_non_lvalue (convert (type, arg2));
6875 case NE_EXPR:
6876 return pedantic_non_lvalue (convert (type, arg1));
6877 case LE_EXPR:
6878 case LT_EXPR:
6879 /* In C++ a ?: expression can be an lvalue, so put the
6880 operand which will be used if they are equal first
6881 so that we can convert this back to the
6882 corresponding COND_EXPR. */
6883 return pedantic_non_lvalue
6884 (convert (type, (fold (build (MIN_EXPR, comp_type,
6885 (comp_code == LE_EXPR
6886 ? comp_op0 : comp_op1),
6887 (comp_code == LE_EXPR
6888 ? comp_op1 : comp_op0))))));
6889 break;
6890 case GE_EXPR:
6891 case GT_EXPR:
6892 return pedantic_non_lvalue
6893 (convert (type, fold (build (MAX_EXPR, comp_type,
6894 (comp_code == GE_EXPR
6895 ? comp_op0 : comp_op1),
6896 (comp_code == GE_EXPR
6897 ? comp_op1 : comp_op0)))));
6898 break;
6899 default:
6900 abort ();
6901 }
6902 }
6903
6904 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6905 we might still be able to simplify this. For example,
6906 if C1 is one less or one more than C2, this might have started
6907 out as a MIN or MAX and been transformed by this function.
6908 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
6909
6910 if (INTEGRAL_TYPE_P (type)
6911 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6912 && TREE_CODE (arg2) == INTEGER_CST)
6913 switch (comp_code)
6914 {
6915 case EQ_EXPR:
6916 /* We can replace A with C1 in this case. */
6917 arg1 = convert (type, TREE_OPERAND (arg0, 1));
6918 t = build (code, type, TREE_OPERAND (t, 0), arg1,
6919 TREE_OPERAND (t, 2));
6920 break;
6921
6922 case LT_EXPR:
6923 /* If C1 is C2 + 1, this is min(A, C2). */
6924 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6925 && operand_equal_p (TREE_OPERAND (arg0, 1),
6926 const_binop (PLUS_EXPR, arg2,
6927 integer_one_node, 0), 1))
6928 return pedantic_non_lvalue
6929 (fold (build (MIN_EXPR, type, arg1, arg2)));
6930 break;
6931
6932 case LE_EXPR:
6933 /* If C1 is C2 - 1, this is min(A, C2). */
6934 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6935 && operand_equal_p (TREE_OPERAND (arg0, 1),
6936 const_binop (MINUS_EXPR, arg2,
6937 integer_one_node, 0), 1))
6938 return pedantic_non_lvalue
6939 (fold (build (MIN_EXPR, type, arg1, arg2)));
6940 break;
6941
6942 case GT_EXPR:
6943 /* If C1 is C2 - 1, this is max(A, C2). */
6944 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6945 && operand_equal_p (TREE_OPERAND (arg0, 1),
6946 const_binop (MINUS_EXPR, arg2,
6947 integer_one_node, 0), 1))
6948 return pedantic_non_lvalue
6949 (fold (build (MAX_EXPR, type, arg1, arg2)));
6950 break;
6951
6952 case GE_EXPR:
6953 /* If C1 is C2 + 1, this is max(A, C2). */
6954 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6955 && operand_equal_p (TREE_OPERAND (arg0, 1),
6956 const_binop (PLUS_EXPR, arg2,
6957 integer_one_node, 0), 1))
6958 return pedantic_non_lvalue
6959 (fold (build (MAX_EXPR, type, arg1, arg2)));
6960 break;
6961 case NE_EXPR:
6962 break;
6963 default:
6964 abort ();
6965 }
6966 }
6967
6968 /* If the second operand is simpler than the third, swap them
6969 since that produces better jump optimization results. */
6970 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
6971 || TREE_CODE (arg1) == SAVE_EXPR)
6972 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
6973 || DECL_P (TREE_OPERAND (t, 2))
6974 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
6975 {
6976 /* See if this can be inverted. If it can't, possibly because
6977 it was a floating-point inequality comparison, don't do
6978 anything. */
6979 tem = invert_truthvalue (arg0);
6980
6981 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6982 {
6983 t = build (code, type, tem,
6984 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6985 arg0 = tem;
6986 /* arg1 should be the first argument of the new T. */
6987 arg1 = TREE_OPERAND (t, 1);
6988 STRIP_NOPS (arg1);
6989 }
6990 }
6991
6992 /* Convert A ? 1 : 0 to simply A. */
6993 if (integer_onep (TREE_OPERAND (t, 1))
6994 && integer_zerop (TREE_OPERAND (t, 2))
6995 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
6996 call to fold will try to move the conversion inside
6997 a COND, which will recurse. In that case, the COND_EXPR
6998 is probably the best choice, so leave it alone. */
6999 && type == TREE_TYPE (arg0))
7000 return pedantic_non_lvalue (arg0);
7001
7002 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
7003 operation is simply A & 2. */
7004
7005 if (integer_zerop (TREE_OPERAND (t, 2))
7006 && TREE_CODE (arg0) == NE_EXPR
7007 && integer_zerop (TREE_OPERAND (arg0, 1))
7008 && integer_pow2p (arg1)
7009 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7010 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7011 arg1, 1))
7012 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
7013
7014 return t;
7015
7016 case COMPOUND_EXPR:
7017 /* When pedantic, a compound expression can be neither an lvalue
7018 nor an integer constant expression. */
7019 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7020 return t;
7021 /* Don't let (0, 0) be null pointer constant. */
7022 if (integer_zerop (arg1))
7023 return build1 (NOP_EXPR, type, arg1);
7024 return convert (type, arg1);
7025
7026 case COMPLEX_EXPR:
7027 if (wins)
7028 return build_complex (type, arg0, arg1);
7029 return t;
7030
7031 case REALPART_EXPR:
7032 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7033 return t;
7034 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7035 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7036 TREE_OPERAND (arg0, 1));
7037 else if (TREE_CODE (arg0) == COMPLEX_CST)
7038 return TREE_REALPART (arg0);
7039 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7040 return fold (build (TREE_CODE (arg0), type,
7041 fold (build1 (REALPART_EXPR, type,
7042 TREE_OPERAND (arg0, 0))),
7043 fold (build1 (REALPART_EXPR,
7044 type, TREE_OPERAND (arg0, 1)))));
7045 return t;
7046
7047 case IMAGPART_EXPR:
7048 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7049 return convert (type, integer_zero_node);
7050 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7051 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7052 TREE_OPERAND (arg0, 0));
7053 else if (TREE_CODE (arg0) == COMPLEX_CST)
7054 return TREE_IMAGPART (arg0);
7055 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7056 return fold (build (TREE_CODE (arg0), type,
7057 fold (build1 (IMAGPART_EXPR, type,
7058 TREE_OPERAND (arg0, 0))),
7059 fold (build1 (IMAGPART_EXPR, type,
7060 TREE_OPERAND (arg0, 1)))));
7061 return t;
7062
7063 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
7064 appropriate. */
7065 case CLEANUP_POINT_EXPR:
7066 if (! has_cleanups (arg0))
7067 return TREE_OPERAND (t, 0);
7068
7069 {
7070 enum tree_code code0 = TREE_CODE (arg0);
7071 int kind0 = TREE_CODE_CLASS (code0);
7072 tree arg00 = TREE_OPERAND (arg0, 0);
7073 tree arg01;
7074
7075 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7076 return fold (build1 (code0, type,
7077 fold (build1 (CLEANUP_POINT_EXPR,
7078 TREE_TYPE (arg00), arg00))));
7079
7080 if (kind0 == '<' || kind0 == '2'
7081 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7082 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7083 || code0 == TRUTH_XOR_EXPR)
7084 {
7085 arg01 = TREE_OPERAND (arg0, 1);
7086
7087 if (TREE_CONSTANT (arg00)
7088 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7089 && ! has_cleanups (arg00)))
7090 return fold (build (code0, type, arg00,
7091 fold (build1 (CLEANUP_POINT_EXPR,
7092 TREE_TYPE (arg01), arg01))));
7093
7094 if (TREE_CONSTANT (arg01))
7095 return fold (build (code0, type,
7096 fold (build1 (CLEANUP_POINT_EXPR,
7097 TREE_TYPE (arg00), arg00)),
7098 arg01));
7099 }
7100
7101 return t;
7102 }
7103
7104 default:
7105 return t;
7106 } /* switch (code) */
7107 }
7108
7109 /* Determine if first argument is a multiple of second argument. Return 0 if
7110 it is not, or we cannot easily determine it to be.
7111
7112 An example of the sort of thing we care about (at this point; this routine
7113 could surely be made more general, and expanded to do what the *_DIV_EXPR's
7114 fold cases do now) is discovering that
7115
7116 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7117
7118 is a multiple of
7119
7120 SAVE_EXPR (J * 8)
7121
7122 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7123
7124 This code also handles discovering that
7125
7126 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7127
7128 is a multiple of 8 so we don't have to worry about dealing with a
7129 possible remainder.
7130
7131 Note that we *look* inside a SAVE_EXPR only to determine how it was
7132 calculated; it is not safe for fold to do much of anything else with the
7133 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7134 at run time. For example, the latter example above *cannot* be implemented
7135 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7136 evaluation time of the original SAVE_EXPR is not necessarily the same at
7137 the time the new expression is evaluated. The only optimization of this
7138 sort that would be valid is changing
7139
7140 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7141
7142 divided by 8 to
7143
7144 SAVE_EXPR (I) * SAVE_EXPR (J)
7145
7146 (where the same SAVE_EXPR (J) is used in the original and the
7147 transformed version). */
7148
7149 static int
7150 multiple_of_p (type, top, bottom)
7151 tree type;
7152 tree top;
7153 tree bottom;
7154 {
7155 if (operand_equal_p (top, bottom, 0))
7156 return 1;
7157
7158 if (TREE_CODE (type) != INTEGER_TYPE)
7159 return 0;
7160
7161 switch (TREE_CODE (top))
7162 {
7163 case MULT_EXPR:
7164 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7165 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7166
7167 case PLUS_EXPR:
7168 case MINUS_EXPR:
7169 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7170 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7171
7172 case NOP_EXPR:
7173 /* Can't handle conversions from non-integral or wider integral type. */
7174 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7175 || (TYPE_PRECISION (type)
7176 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7177 return 0;
7178
7179 /* .. fall through ... */
7180
7181 case SAVE_EXPR:
7182 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7183
7184 case INTEGER_CST:
7185 if ((TREE_CODE (bottom) != INTEGER_CST)
7186 || (tree_int_cst_sgn (top) < 0)
7187 || (tree_int_cst_sgn (bottom) < 0))
7188 return 0;
7189 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7190 top, bottom, 0));
7191
7192 default:
7193 return 0;
7194 }
7195 }