/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 88, 92-96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */


/* The entry points in this file are fold, size_int, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and a prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */

#include "config.h"
#include <stdio.h>
#include <setjmp.h>
#include "flags.h"
#include "tree.h"

/* Handle floating overflow for `const_binop'.  */
static jmp_buf float_error;

static void encode PROTO((HOST_WIDE_INT *,
                          HOST_WIDE_INT, HOST_WIDE_INT));
static void decode PROTO((HOST_WIDE_INT *,
                          HOST_WIDE_INT *, HOST_WIDE_INT *));
int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
                                HOST_WIDE_INT, HOST_WIDE_INT,
                                HOST_WIDE_INT, HOST_WIDE_INT *,
                                HOST_WIDE_INT *, HOST_WIDE_INT *,
                                HOST_WIDE_INT *));
static int split_tree PROTO((tree, enum tree_code, tree *,
                             tree *, int *));
static tree int_const_binop PROTO((enum tree_code, tree, tree, int, int));
static tree const_binop PROTO((enum tree_code, tree, tree, int));
static tree fold_convert PROTO((tree, tree));
static enum tree_code invert_tree_comparison PROTO((enum tree_code));
static enum tree_code swap_tree_comparison PROTO((enum tree_code));
static int truth_value_p PROTO((enum tree_code));
static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
static tree eval_subst PROTO((tree, tree, tree, tree, tree));
static tree omit_one_operand PROTO((tree, tree, tree));
static tree pedantic_omit_one_operand PROTO((tree, tree, tree));
static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
                                              tree, tree));
static tree decode_field_reference PROTO((tree, int *, int *,
                                          enum machine_mode *, int *,
                                          int *, tree *, tree *));
static int all_ones_mask_p PROTO((tree, int));
static int simple_operand_p PROTO((tree));
static tree range_binop PROTO((enum tree_code, tree, tree, int,
                               tree, int));
static tree make_range PROTO((tree, int *, tree *, tree *));
static tree build_range_check PROTO((tree, tree, int, tree, tree));
static int merge_ranges PROTO((int *, tree *, tree *, int, tree, tree,
                               int, tree, tree));
static tree fold_range_test PROTO((tree));
static tree unextend PROTO((tree, int, int, tree));
static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
static tree strip_compound_expr PROTO((tree, tree));
static int multiple_of_p PROTO((tree, tree, tree));

#ifndef BRANCH_COST
#define BRANCH_COST 1
#endif

/* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
   Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
   Then this yields nonzero if overflow occurred during the addition.
   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
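
/* For illustration, assuming a 32-bit HOST_WIDE_INT: with a = b =
   0x40000000, the wrapped sum is 0x80000000; a ^ b is 0 (same sign) and
   a ^ sum has the sign bit set, so ~(a ^ b) & (a ^ sum) is negative and
   the macro reports overflow.  With a = 1, b = -1, sum = 0, the operands'
   signs differ, ~(a ^ b) has a clear sign bit, and no overflow is
   reported.  */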
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive
   number.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
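
/* For illustration, assuming HOST_BITS_PER_WIDE_INT == 32: BASE is
   0x10000, and for x == 0x12345678, LOWPART (x) == 0x5678 and
   HIGHPART (x) == 0x1234, so x == LOWPART (x) + HIGHPART (x) * BASE.  */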

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (words, low, hi)
     HOST_WIDE_INT *words;
     HOST_WIDE_INT low, hi;
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (words, low, hi)
     HOST_WIDE_INT *words;
     HOST_WIDE_INT *low, *hi;
{
  *low = words[0] | words[1] * BASE;
  *hi = words[2] | words[3] * BASE;
}
\f
/* Make the integer constant T valid for its type
   by setting to 0 or 1 all the bits in the constant
   that don't belong in the type.
   Yield 1 if a signed overflow occurs, 0 otherwise.
   If OVERFLOW is nonzero, a signed overflow has already occurred
   in calculating T, so propagate it.

   Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
   if it exists.  */

int
force_fit_type (t, overflow)
     tree t;
     int overflow;
{
  HOST_WIDE_INT low, high;
  register int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
#ifdef CHECK_FLOAT_VALUE
      CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
                         overflow);
#endif
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      TREE_INT_CST_HIGH (t)
        &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
    }
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow.  */
  if (TREE_UNSIGNED (TREE_TYPE (t)))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? (TREE_INT_CST_HIGH (t)
             & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          TREE_INT_CST_HIGH (t)
            |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
        }
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Yield nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
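
/* For illustration, assuming HOST_BITS_PER_WIDE_INT == 32: for a signed
   type with prec == 8, an INTEGER_CST with low == 0xFF, high == 0 keeps
   only its low 8 bits; bit 7 is set, so the value is sign-extended to
   low == -1, high == -1.  Since the stored bits changed, the function
   reports a signed overflow.  */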
\f
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (l1, h1, l2, h2, lv, hv)
     HOST_WIDE_INT l1, h1, l2, h2;
     HOST_WIDE_INT *lv, *hv;
{
  HOST_WIDE_INT l, h;

  l = l1 + l2;
  h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);

  *lv = l;
  *hv = h;
  return overflow_sum_sign (h1, h2, h);
}
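
/* For illustration, assuming HOST_BITS_PER_WIDE_INT == 32: adding
   l1 = 0xFFFFFFFF, h1 = 0 to l2 = 1, h2 = 0 wraps the low word to 0;
   the test (unsigned HOST_WIDE_INT) l < l1 then supplies the carry of 1
   into the high word, and since h1, h2 and the resulting h are all
   nonnegative, overflow_sum_sign reports no signed overflow.  */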

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (l1, h1, lv, hv)
     HOST_WIDE_INT l1, h1;
     HOST_WIDE_INT *lv, *hv;
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = - l1;
      *hv = ~ h1;
      return 0;
    }
}
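
/* For illustration: negation is ~x + 1.  When the low word is nonzero,
   the + 1 cannot carry out of it, so the high word is simply ~h1; when
   the low word is zero, the carry ripples all the way up, so the high
   word is -h1.  The only overflow case is negating the most negative
   value, where h1 and -h1 share a set sign bit, which (*hv & h1) < 0
   detects.  */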
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (l1, h1, l2, h2, lv, hv)
     HOST_WIDE_INT l1, h1, l2, h2;
     HOST_WIDE_INT *lv, *hv;
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  register unsigned HOST_WIDE_INT carry;
  register int i, j, k;
  HOST_WIDE_INT toplow, tophigh, neglow, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  bzero ((char *) prod, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
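
/* For illustration, assuming HOST_BITS_PER_WIDE_INT == 32 (so each word
   holds 16 bits and BASE == 0x10000): multiplying 0x10002 by 0x30004
   works digit by digit in base 0x10000, like longhand decimal
   multiplication: arg1 = {2, 1, 0, 0} and arg2 = {4, 3, 0, 0} accumulate
   into prod = {8, 10, 3, 0, ...}, i.e. the doubleword 0x3000A0008.  */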
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
     int arith;
{
  if (count < 0)
    {
      rshift_double (l1, h1, - count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = (unsigned HOST_WIDE_INT) l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | ((unsigned HOST_WIDE_INT) l1
                >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = (unsigned HOST_WIDE_INT) l1 << count;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (l1, h1, count, prec, lv, hv, arith)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;
  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = signmask;
      *lv = ((signmask << (2 * HOST_BITS_PER_WIDE_INT - count - 1) << 1)
             | ((unsigned HOST_WIDE_INT) h1
                >> (count - HOST_BITS_PER_WIDE_INT)));
    }
  else
    {
      *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
      *hv = ((signmask << (HOST_BITS_PER_WIDE_INT - count))
             | ((unsigned HOST_WIDE_INT) h1 >> count));
    }
}
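
/* For illustration, assuming HOST_BITS_PER_WIDE_INT == 32: an arithmetic
   right shift of l1 = 0, h1 = 0x80000000 by 4 makes SIGNMASK all ones and
   yields *hv = 0xF8000000, *lv = 0, while a logical shift of the same
   value uses SIGNMASK == 0 and yields *hv = 0x08000000.  The split shifts
   of the form `x >> (bits - count - 1) >> 1' avoid a full-width shift,
   which C leaves undefined, when count == 0.  */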
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (l1, h1, count, prec, lv, hv)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
{
  HOST_WIDE_INT s1l, s1h, s2l, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (l1, h1, count, prec, lv, hv)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
{
  HOST_WIDE_INT s1l, s1h, s2l, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
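
/* For illustration: each rotation is assembled from two logical shifts,
   e.g. rotating right by COUNT within PREC bits computes
   (x >> COUNT) | (x << (PREC - COUNT)); the initial reduction maps any
   COUNT, including negative ones, into the range [0, PREC).  */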
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (code, uns,
                      lnum_orig, hnum_orig, lden_orig, hden_orig,
                      lquo, hquo, lrem, hrem)
     enum tree_code code;
     int uns;
     HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
     HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
     HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  register int i, j;
  unsigned HOST_WIDE_INT work;
  register unsigned HOST_WIDE_INT carry = 0;
  HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if ((hden == 0) && (lden == 0))
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  bzero ((char *) quo, sizeof quo);

  bzero ((char *) num, sizeof num);	/* to zero the extra element */
  bzero ((char *) den, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / (unsigned HOST_WIDE_INT) lden;
          carry = work % (unsigned HOST_WIDE_INT) lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest non-zero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {
          /* Scale divisor and dividend.  */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }
          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0)
                den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */

          if (num[num_hi_sig] < carry)
            {
              quo_est--;
              carry = 0;	/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }
              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
        HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);
        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && ((unsigned HOST_WIDE_INT) labs_den
                    < (unsigned HOST_WIDE_INT) ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
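
/* For illustration of the rounding modes on -7 / 2: the truncating
   division yields quotient -3, remainder -1.  FLOOR_DIV_EXPR sees a
   negative quotient with a nonzero remainder and adjusts to -4;
   CEIL_DIV_EXPR keeps -3.  For ROUND_DIV_EXPR the test above is
   effectively 2*|rem| > |den|: 7 / 4 (quotient 1, remainder 3) has
   6 > 4 and is bumped away from zero to 2, while the exact half case
   -7 / 2 (2*|rem| == |den|) is left at -3.  The remainder is then
   recomputed as num - quo*den.  */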
\f
#ifndef REAL_ARITHMETIC
/* Effectively truncate a real value to represent the nearest possible value
   in a narrower mode.  The result is actually represented in the same data
   type as the argument, but its value is usually different.

   A trap may occur during the FP operations and it is the responsibility
   of the calling function to have a handler established.  */

REAL_VALUE_TYPE
real_value_truncate (mode, arg)
     enum machine_mode mode;
     REAL_VALUE_TYPE arg;
{
  return REAL_VALUE_TRUNCATE (mode, arg);
}

#if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT

/* Check for infinity in an IEEE double precision number.  */

int
target_isinf (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
              && u.big_endian.mantissa1 == 0
              && u.big_endian.mantissa2 == 0);
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
              && u.little_endian.mantissa1 == 0
              && u.little_endian.mantissa2 == 0);
    }
}

/* Check whether an IEEE double precision number is a NaN.  */

int
target_isnan (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
              && (u.big_endian.mantissa1 != 0
                  || u.big_endian.mantissa2 != 0));
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
              && (u.little_endian.mantissa1 != 0
                  || u.little_endian.mantissa2 != 0));
    }
}

/* Check for a negative IEEE double precision number.  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return u.big_endian.sign;
    }
  else
    {
      u.d = x;
      return u.little_endian.sign;
    }
}
#else /* Target not IEEE */

/* Let's assume other float formats don't have infinity.
   (This can be overridden by redefining REAL_VALUE_ISINF.)  */

int
target_isinf (x)
     REAL_VALUE_TYPE x;
{
  return 0;
}

/* Let's assume other float formats don't have NaNs.
   (This can be overridden by redefining REAL_VALUE_ISNAN.)  */

int
target_isnan (x)
     REAL_VALUE_TYPE x;
{
  return 0;
}

/* Let's assume other float formats don't have minus zero.
   (This can be overridden by redefining REAL_VALUE_NEGATIVE.)  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  return x < 0;
}
#endif /* Target not IEEE */

/* Try to change R into its exact multiplicative inverse in machine mode
   MODE.  Return nonzero function value if successful.  */

int
exact_real_inverse (mode, r)
     enum machine_mode mode;
     REAL_VALUE_TYPE *r;
{
  union
    {
      double d;
      unsigned short i[4];
    } x, t, y;
  int i;

  /* Usually disable if bounds checks are not reliable.  */
  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)
    return 0;

  /* Set array index to the less significant bits in the unions, depending
     on the endian-ness of the host doubles.
     Disable if insufficient information on the data structure.  */
#if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
  return 0;
#else
#if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
#define K 2
#else
#if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
#define K 2
#else
#define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
#endif
#endif
#endif

  if (setjmp (float_error))
    {
      /* Don't do the optimization if there was an arithmetic error.  */
    fail:
      set_float_handler (NULL_PTR);
      return 0;
    }
  set_float_handler (float_error);

  /* Domain check the argument.  */
  x.d = *r;
  if (x.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))
    goto fail;
#endif

  /* Compute the reciprocal and check for numerical exactness.
     It is unnecessary to check all the significand bits to determine
     whether X is a power of 2.  If X is not, then it is impossible for
     the bottom half significand of both X and 1/X to be all zero bits.
     Hence we ignore the data structure of the top half and examine only
     the low order bits of the two significands.  */
  t.d = 1.0 / x.d;
  if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)
    goto fail;

  /* Truncate to the required mode and range-check the result.  */
  y.d = REAL_VALUE_TRUNCATE (mode, t.d);
#ifdef CHECK_FLOAT_VALUE
  i = 0;
  if (CHECK_FLOAT_VALUE (mode, y.d, i))
    goto fail;
#endif

  /* Fail if truncation changed the value.  */
  if (y.d != t.d || y.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))
    goto fail;
#endif

  /* Output the reciprocal and return success flag.  */
  set_float_handler (NULL_PTR);
  *r = y.d;
  return 1;
}
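
/* For illustration of the exactness test: with *r == 4.0, t becomes
   0.25; both are powers of two, so the low-order significand words of x
   and t are all zero and the inverse is accepted.  With *r == 3.0, 1/3
   is a recurring binary fraction, t's low significand words are nonzero,
   and the routine fails, leaving the division to run time.  */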
#endif /* no REAL_ARITHMETIC */
\f
/* Split a tree IN into a constant and a variable part
   that could be combined with CODE to make IN.
   CODE must be a commutative arithmetic operation.
   Store the constant part into *CONP and the variable in *VARP.
   Return 1 if this was done; zero means the tree IN did not decompose
   this way.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
   Therefore, we must tell the caller whether the variable part
   was subtracted.  We do this by storing 1 or -1 into *VARSIGNP.
   The value stored is the coefficient for the variable term.
   The constant term we return should always be added;
   we negate it if necessary.  */

static int
split_tree (in, code, varp, conp, varsignp)
     tree in;
     enum tree_code code;
     tree *varp, *conp;
     int *varsignp;
{
  register tree outtype = TREE_TYPE (in);
  *varp = 0;
  *conp = 0;

  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (in) == NOP_EXPR
          || TREE_CODE (in) == CONVERT_EXPR)
         && (TYPE_MODE (TREE_TYPE (in))
             == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
    in = TREE_OPERAND (in, 0);

  if (TREE_CODE (in) == code
      || (! FLOAT_TYPE_P (TREE_TYPE (in))
          /* We can associate addition and subtraction together
             (even though the C standard doesn't say so)
             for integers because the value is not affected.
             For reals, the value might be affected, so we can't.  */
          && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
              || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
      if (code == INTEGER_CST)
        {
          *conp = TREE_OPERAND (in, 0);
          *varp = TREE_OPERAND (in, 1);
          if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
              && TREE_TYPE (*varp) != outtype)
            *varp = convert (outtype, *varp);
          *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
          return 1;
        }
      if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
        {
          *conp = TREE_OPERAND (in, 1);
          *varp = TREE_OPERAND (in, 0);
          *varsignp = 1;
          if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
              && TREE_TYPE (*varp) != outtype)
            *varp = convert (outtype, *varp);
          if (TREE_CODE (in) == MINUS_EXPR)
            {
              /* If operation is subtraction and constant is second,
                 must negate it to get an additive constant.
                 And this cannot be done unless it is a manifest constant.
                 It could also be the address of a static variable.
                 We cannot negate that, so give up.  */
              if (TREE_CODE (*conp) == INTEGER_CST)
                /* Subtracting from integer_zero_node loses for long long.  */
                *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
              else
                return 0;
            }
          return 1;
        }
      if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
        {
          *conp = TREE_OPERAND (in, 0);
          *varp = TREE_OPERAND (in, 1);
          if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
              && TREE_TYPE (*varp) != outtype)
            *varp = convert (outtype, *varp);
          *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
          return 1;
        }
    }
  return 0;
}
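
/* For illustration, with CODE == PLUS_EXPR: an input tree for `v - 8'
   decomposes with *VARP = v, *VARSIGNP = 1, and *CONP folded to -8, so
   the caller can treat it uniformly as v + (-8); an input `8 - v'
   instead yields *CONP = 8 with *VARSIGNP = -1.  */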
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.
   If FORSIZE is nonzero, compute overflow for unsigned types.  */

static tree
int_const_binop (code, arg1, arg2, notrunc, forsize)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc, forsize;
{
  HOST_WIDE_INT int1l, int1h, int2l, int2h;
  HOST_WIDE_INT low, hi;
  HOST_WIDE_INT garbagel, garbageh;
  register tree t;
  int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l,
                     TYPE_PRECISION (TREE_TYPE (arg1)),
                     &low, &hi,
                     !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l,
                      TYPE_PRECISION (TREE_TYPE (arg1)),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = overflow_sum_sign (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;
          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        {
          low = (((unsigned HOST_WIDE_INT) int1h
                  < (unsigned HOST_WIDE_INT) int2h)
                 || (((unsigned HOST_WIDE_INT) int1h
                      == (unsigned HOST_WIDE_INT) int2h)
                     && ((unsigned HOST_WIDE_INT) int1l
                         < (unsigned HOST_WIDE_INT) int2l)));
        }
      else
        {
          low = ((int1h < int2h)
                 || ((int1h == int2h)
                     && ((unsigned HOST_WIDE_INT) int1l
                         < (unsigned HOST_WIDE_INT) int2l)));
        }
      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  if (TREE_TYPE (arg1) == sizetype && hi == 0
      && low >= 0 && low <= TREE_INT_CST_LOW (TYPE_MAX_VALUE (sizetype))
      && ! overflow
      && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    t = size_int (low);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc ? (!uns || forsize) && overflow
        : force_fit_type (t, (!uns || forsize) && overflow) && ! no_overflow)
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));
  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (forsize
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;
  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE
   to produce a new constant.
   We assume ARG1 and ARG2 have the same data type,
   or at least are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc;
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc, 0);

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      int overflow = 0;
      REAL_VALUE_TYPE value;
      tree t;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;
      else if (setjmp (float_error))
        {
          t = copy_node (arg1);
          overflow = 1;
          goto got_float;
        }

      set_float_handler (float_error);

#ifdef REAL_ARITHMETIC
      REAL_ARITHMETIC (value, code, d1, d2);
#else
      switch (code)
        {
        case PLUS_EXPR:
          value = d1 + d2;
          break;

        case MINUS_EXPR:
          value = d1 - d2;
          break;

        case MULT_EXPR:
          value = d1 * d2;
          break;

        case RDIV_EXPR:
#ifndef REAL_INFINITY
          if (d2 == 0)
            abort ();
#endif

          value = d1 / d2;
          break;

        case MIN_EXPR:
          value = MIN (d1, d2);
          break;

        case MAX_EXPR:
          value = MAX (d1, d2);
          break;

        default:
          abort ();
        }
#endif /* no REAL_ARITHMETIC */
      t = build_real (TREE_TYPE (arg1),
                      real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
                                           value));
    got_float:
      set_float_handler (NULL_PTR);

      TREE_OVERFLOW (t)
        = (force_fit_type (t, overflow)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      register tree type = TREE_TYPE (arg1);
      register tree r1 = TREE_REALPART (arg1);
      register tree i1 = TREE_IMAGPART (arg1);
      register tree r2 = TREE_REALPART (arg2);
      register tree i2 = TREE_IMAGPART (arg2);
      register tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            register tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
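
/* For reference, the complex cases above follow the usual formulas:
   (r1 + i1*i) * (r2 + i2*i) == (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i, and
   division multiplies through by the conjugate of the divisor, giving
   ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2).  */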
\f
/* Return an INTEGER_CST with value NUMBER and type from `sizetype'.  */

tree
size_int (number)
     unsigned HOST_WIDE_INT number;
{
  register tree t;
  /* Type-size nodes already made for small sizes.  */
  static tree size_table[2 * HOST_BITS_PER_WIDE_INT + 1];

  if (number < 2 * HOST_BITS_PER_WIDE_INT + 1
      && size_table[number] != 0)
    return size_table[number];
  if (number < 2 * HOST_BITS_PER_WIDE_INT + 1)
    {
      push_obstacks_nochange ();
      /* Make this a permanent node.  */
      end_temporary_allocation ();
      t = build_int_2 (number, 0);
      TREE_TYPE (t) = sizetype;
      size_table[number] = t;
      pop_obstacks ();
    }
  else
    {
      t = build_int_2 (number, 0);
      TREE_TYPE (t) = sizetype;
      TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
    }
  return t;
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.
   CODE is a tree code.  Data type is taken from `sizetype'.
   If the operands are constant, so is the result.  */

tree
size_binop (code, arg0, arg1)
     enum tree_code code;
     tree arg0, arg1;
{
  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0, 1);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, sizetype, arg0, arg1));
}
\f
/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.  */

static tree
fold_convert (t, arg1)
     register tree t;
     register tree arg1;
{
  register tree type = TREE_TYPE (t);
  int overflow = 0;

  if (TREE_CODE (type) == POINTER_TYPE || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return t;

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.  */
          TREE_OVERFLOW (t)
            = (TREE_OVERFLOW (arg1)
               | force_fit_type (t,
                                 (TREE_INT_CST_HIGH (arg1) < 0
                                  & (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1))))));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* Don't initialize these, use assignments.
             Initialized local aggregates don't work on old compilers.  */
          REAL_VALUE_TYPE x;
          REAL_VALUE_TYPE l;
          REAL_VALUE_TYPE u;
          tree type1 = TREE_TYPE (arg1);

          x = TREE_REAL_CST (arg1);
          l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
          u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */
#ifdef REAL_ARITHMETIC
          REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
          REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
#else
          l--;
          u++;
#endif
          /* If X is a NaN, use zero instead and show we have an overflow.
             Otherwise, range check.  */
          if (REAL_VALUE_ISNAN (x))
            overflow = 1, x = dconst0;
          else if (! (REAL_VALUES_LESS (l, x) && REAL_VALUES_LESS (x, u)))
            overflow = 1;

#ifndef REAL_ARITHMETIC
          {
            HOST_WIDE_INT low, high;
            HOST_WIDE_INT half_word
              = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);

            if (x < 0)
              x = -x;

            high = (HOST_WIDE_INT) (x / half_word / half_word);
            x -= (REAL_VALUE_TYPE) high * half_word * half_word;
            if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
              {
                low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
                low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
              }
            else
              low = (HOST_WIDE_INT) x;
            if (TREE_REAL_CST (arg1) < 0)
              neg_double (low, high, &low, &high);
            t = build_int_2 (low, high);
          }
#else
          {
            HOST_WIDE_INT low, high;
            REAL_VALUE_TO_INT (&low, &high, x);
            t = build_int_2 (low, high);
          }
#endif
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              t = arg1;
              TREE_TYPE (arg1) = type;
              return t;
            }
          else if (setjmp (float_error))
            {
              overflow = 1;
              t = copy_node (arg1);
              goto got_it;
            }
          set_float_handler (float_error);

          t = build_real (type, real_value_truncate (TYPE_MODE (type),
                                                     TREE_REAL_CST (arg1)));
          set_float_handler (NULL_PTR);

        got_it:
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
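
/* For illustration of the range check when folding a REAL_CST to a
   signed 8-bit type: the bounds start as l = -128, u = 127 and are
   widened to -129 and 128, so any x with -129 < x < 128 truncates
   safely toward zero (127.9 becomes 127), while 128.0 or -129.0 fails
   the strict comparison and is flagged as an overflow.  */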
\f
/* Return an expr equal to X but certainly not valid as an lvalue.
   Also make sure it is not valid as a null pointer constant.  */

tree
non_lvalue (x)
     tree x;
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    {
      if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
        {
          /* Use NOP_EXPR instead of NON_LVALUE_EXPR
             so convert_for_assignment won't strip it.
             This is so this 0 won't be treated as a null pointer constant.  */
          result = build1 (NOP_EXPR, TREE_TYPE (x), x);
          TREE_CONSTANT (result) = TREE_CONSTANT (x);
          return result;
        }
      return x;
    }

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

tree
pedantic_non_lvalue (x)
     tree x;
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
\f
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (code)
     enum tree_code code;
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      abort ();
    }
}
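
/* For illustration of why this is unsafe for floating point: with IEEE
   NaNs, `a < b' and `a >= b' are both false when either operand is a
   NaN, so GE_EXPR is not the logical inverse of LT_EXPR; only EQ_EXPR
   and NE_EXPR invert cleanly.  */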

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (code)
     enum tree_code code;
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}

/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (code)
     enum tree_code code;
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
\f
/* Return nonzero if two operands are necessarily equal.
   If ONLY_CONST is non-zero, only return non-zero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.  */

int
operand_equal_p (arg0, arg1, only_const)
     tree arg0, arg1;
     int only_const;
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
                && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 only_const)
                && operand_equal_p (TREE_IMAGPART (arg0),
                                    TREE_IMAGPART (arg1), only_const));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! strncmp (TREE_STRING_POINTER (arg0),
                              TREE_STRING_POINTER (arg1),
                              TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0);
      default:
        break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
          && (TREE_UNSIGNED (TREE_TYPE (arg0))
              != TREE_UNSIGNED (TREE_TYPE (arg1))))
        return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
          && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
                              0))
        return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
               || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
               || TREE_CODE (arg0) == BIT_IOR_EXPR
               || TREE_CODE (arg0) == BIT_XOR_EXPR
               || TREE_CODE (arg0) == BIT_AND_EXPR
               || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0);

        case COMPONENT_REF:
        case ARRAY_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0));

        case BIT_FIELD_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 2),
                                      TREE_OPERAND (arg1, 2), 0));
        default:
          return 0;
        }

    default:
      return 0;
    }
}
\f
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (arg0, arg1, other)
     tree arg0, arg1;
     tree other;
{
  int unsignedp1, unsignedpo;
  tree primarg1, primother;
  unsigned correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = convert (signed_or_unsigned_type (unsignedp1,
                                                   TREE_TYPE (primarg1)),
                          primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
1934 \f
1935 /* See if ARG is an expression that is either a comparison or is performing
1936 arithmetic on comparisons. The comparisons must only be comparing
1937 two different values, which will be stored in *CVAL1 and *CVAL2; if
1938 they are non-zero it means that some operands have already been found.
1939 No variables may be used anywhere else in the expression except in the
1940 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1941 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1942
1943 If this is true, return 1. Otherwise, return zero. */
1944
1945 static int
1946 twoval_comparison_p (arg, cval1, cval2, save_p)
1947 tree arg;
1948 tree *cval1, *cval2;
1949 int *save_p;
1950 {
1951 enum tree_code code = TREE_CODE (arg);
1952 char class = TREE_CODE_CLASS (code);
1953
1954 /* We can handle some of the 'e' cases here. */
1955 if (class == 'e' && code == TRUTH_NOT_EXPR)
1956 class = '1';
1957 else if (class == 'e'
1958 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1959 || code == COMPOUND_EXPR))
1960 class = '2';
1961
1962 /* ??? Disable this since the SAVE_EXPR might already be in use outside
1963 the expression. There may be no way to make this work, but it needs
1964 to be looked at again for 2.6. */
1965 #if 0
1966 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
1967 {
1968 /* If we've already found a CVAL1 or CVAL2, this expression is
1969 too complex to handle. */
1970 if (*cval1 || *cval2)
1971 return 0;
1972
1973 class = '1';
1974 *save_p = 1;
1975 }
1976 #endif
1977
1978 switch (class)
1979 {
1980 case '1':
1981 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
1982
1983 case '2':
1984 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
1985 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1986 cval1, cval2, save_p));
1987
1988 case 'c':
1989 return 1;
1990
1991 case 'e':
1992 if (code == COND_EXPR)
1993 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
1994 cval1, cval2, save_p)
1995 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1996 cval1, cval2, save_p)
1997 && twoval_comparison_p (TREE_OPERAND (arg, 2),
1998 cval1, cval2, save_p));
1999 return 0;
2000
2001 case '<':
2002 /* First see if we can handle the first operand, then the second. For
2003 the second operand, we know *CVAL1 can't be zero. It must be that
2004 one side of the comparison is each of the values; test for the
2005 case where this isn't true by failing if the two operands
2006 are the same. */
2007
2008 if (operand_equal_p (TREE_OPERAND (arg, 0),
2009 TREE_OPERAND (arg, 1), 0))
2010 return 0;
2011
2012 if (*cval1 == 0)
2013 *cval1 = TREE_OPERAND (arg, 0);
2014 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2015 ;
2016 else if (*cval2 == 0)
2017 *cval2 = TREE_OPERAND (arg, 0);
2018 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2019 ;
2020 else
2021 return 0;
2022
2023 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2024 ;
2025 else if (*cval2 == 0)
2026 *cval2 = TREE_OPERAND (arg, 1);
2027 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2028 ;
2029 else
2030 return 0;
2031
2032 return 1;
2033
2034 default:
2035 return 0;
2036 }
2037 }
2038 \f
2039 /* ARG is a tree that is known to contain just arithmetic operations and
2040 comparisons. Evaluate the operations in the tree substituting NEW0 for
2041 any occurrence of OLD0 as an operand of a comparison and likewise for
2042 NEW1 and OLD1. */
2043
2044 static tree
2045 eval_subst (arg, old0, new0, old1, new1)
2046 tree arg;
2047 tree old0, new0, old1, new1;
2048 {
2049 tree type = TREE_TYPE (arg);
2050 enum tree_code code = TREE_CODE (arg);
2051 char class = TREE_CODE_CLASS (code);
2052
2053 /* We can handle some of the 'e' cases here. */
2054 if (class == 'e' && code == TRUTH_NOT_EXPR)
2055 class = '1';
2056 else if (class == 'e'
2057 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2058 class = '2';
2059
2060 switch (class)
2061 {
2062 case '1':
2063 return fold (build1 (code, type,
2064 eval_subst (TREE_OPERAND (arg, 0),
2065 old0, new0, old1, new1)));
2066
2067 case '2':
2068 return fold (build (code, type,
2069 eval_subst (TREE_OPERAND (arg, 0),
2070 old0, new0, old1, new1),
2071 eval_subst (TREE_OPERAND (arg, 1),
2072 old0, new0, old1, new1)));
2073
2074 case 'e':
2075 switch (code)
2076 {
2077 case SAVE_EXPR:
2078 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2079
2080 case COMPOUND_EXPR:
2081 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2082
2083 case COND_EXPR:
2084 return fold (build (code, type,
2085 eval_subst (TREE_OPERAND (arg, 0),
2086 old0, new0, old1, new1),
2087 eval_subst (TREE_OPERAND (arg, 1),
2088 old0, new0, old1, new1),
2089 eval_subst (TREE_OPERAND (arg, 2),
2090 old0, new0, old1, new1)));
2091 default:
2092 break;
2093 }
2094 /* fall through (???) */
2095
2096 case '<':
2097 {
2098 tree arg0 = TREE_OPERAND (arg, 0);
2099 tree arg1 = TREE_OPERAND (arg, 1);
2100
2101 /* We need to check both for exact equality and tree equality. The
2102 former will be true if the operand has a side-effect. In that
2103 case, we know the operand occurred exactly once. */
2104
2105 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2106 arg0 = new0;
2107 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2108 arg0 = new1;
2109
2110 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2111 arg1 = new0;
2112 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2113 arg1 = new1;
2114
2115 return fold (build (code, type, arg0, arg1));
2116 }
2117
2118 default:
2119 return arg;
2120 }
2121 }
2122 \f
2123 /* Return a tree for the case when the result of an expression is RESULT
2124 converted to TYPE and OMITTED was previously an operand of the expression
2125 but is now not needed (e.g., we folded OMITTED * 0).
2126
2127 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2128 the conversion of RESULT to TYPE. */
2129
2130 static tree
2131 omit_one_operand (type, result, omitted)
2132 tree type, result, omitted;
2133 {
2134 tree t = convert (type, result);
2135
2136 if (TREE_SIDE_EFFECTS (omitted))
2137 return build (COMPOUND_EXPR, type, omitted, t);
2138
2139 return non_lvalue (t);
2140 }
2141
2142 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2143
2144 static tree
2145 pedantic_omit_one_operand (type, result, omitted)
2146 tree type, result, omitted;
2147 {
2148 tree t = convert (type, result);
2149
2150 if (TREE_SIDE_EFFECTS (omitted))
2151 return build (COMPOUND_EXPR, type, omitted, t);
2152
2153 return pedantic_non_lvalue (t);
2154 }
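
/* Illustrative sketch (kept out of the build, like the #if 0 block
   earlier in this file) of the side-effect rule the two functions above
   implement, in plain C. `bump' is a hypothetical side-effecting
   operand standing in for OMITTED. */
#if 0
#include <assert.h>

static int counter = 0;
static int bump (void) { return ++counter; }

/* `bump () * 0' may not be folded to plain `0'; the COMPOUND_EXPR
   form `(bump (), 0)' keeps the side effect and yields the result. */
static int folded_product (void)
{
  return (bump (), 0);
}

static void check_omit_one_operand (void)
{
  int before = counter;
  assert (folded_product () == 0);
  assert (counter == before + 1);	/* the omitted operand still ran */
}
#endif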
2155
2156
2157 \f
2158 /* Return a simplified tree node for the truth-negation of ARG. This
2159 never alters ARG itself. We assume that ARG is an operation that
2160 returns a truth value (0 or 1). */
2161
2162 tree
2163 invert_truthvalue (arg)
2164 tree arg;
2165 {
2166 tree type = TREE_TYPE (arg);
2167 enum tree_code code = TREE_CODE (arg);
2168
2169 if (code == ERROR_MARK)
2170 return arg;
2171
2172 /* If this is a comparison, we can simply invert it, except for
2173 floating-point non-equality comparisons, in which case we just
2174 enclose a TRUTH_NOT_EXPR around what we have. */
2175
2176 if (TREE_CODE_CLASS (code) == '<')
2177 {
2178 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2179 && code != NE_EXPR && code != EQ_EXPR)
2180 return build1 (TRUTH_NOT_EXPR, type, arg);
2181 else
2182 return build (invert_tree_comparison (code), type,
2183 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2184 }
2185
2186 switch (code)
2187 {
2188 case INTEGER_CST:
2189 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2190 && TREE_INT_CST_HIGH (arg) == 0, 0));
2191
2192 case TRUTH_AND_EXPR:
2193 return build (TRUTH_OR_EXPR, type,
2194 invert_truthvalue (TREE_OPERAND (arg, 0)),
2195 invert_truthvalue (TREE_OPERAND (arg, 1)));
2196
2197 case TRUTH_OR_EXPR:
2198 return build (TRUTH_AND_EXPR, type,
2199 invert_truthvalue (TREE_OPERAND (arg, 0)),
2200 invert_truthvalue (TREE_OPERAND (arg, 1)));
2201
2202 case TRUTH_XOR_EXPR:
2203 /* Here we can invert either operand. We invert the first operand
2204 unless the second operand is a TRUTH_NOT_EXPR in which case our
2205 result is the XOR of the first operand with the inside of the
2206 negation of the second operand. */
2207
2208 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2209 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2210 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2211 else
2212 return build (TRUTH_XOR_EXPR, type,
2213 invert_truthvalue (TREE_OPERAND (arg, 0)),
2214 TREE_OPERAND (arg, 1));
2215
2216 case TRUTH_ANDIF_EXPR:
2217 return build (TRUTH_ORIF_EXPR, type,
2218 invert_truthvalue (TREE_OPERAND (arg, 0)),
2219 invert_truthvalue (TREE_OPERAND (arg, 1)));
2220
2221 case TRUTH_ORIF_EXPR:
2222 return build (TRUTH_ANDIF_EXPR, type,
2223 invert_truthvalue (TREE_OPERAND (arg, 0)),
2224 invert_truthvalue (TREE_OPERAND (arg, 1)));
2225
2226 case TRUTH_NOT_EXPR:
2227 return TREE_OPERAND (arg, 0);
2228
2229 case COND_EXPR:
2230 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2231 invert_truthvalue (TREE_OPERAND (arg, 1)),
2232 invert_truthvalue (TREE_OPERAND (arg, 2)));
2233
2234 case COMPOUND_EXPR:
2235 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2236 invert_truthvalue (TREE_OPERAND (arg, 1)));
2237
2238 case NON_LVALUE_EXPR:
2239 return invert_truthvalue (TREE_OPERAND (arg, 0));
2240
2241 case NOP_EXPR:
2242 case CONVERT_EXPR:
2243 case FLOAT_EXPR:
2244 return build1 (TREE_CODE (arg), type,
2245 invert_truthvalue (TREE_OPERAND (arg, 0)));
2246
2247 case BIT_AND_EXPR:
2248 if (!integer_onep (TREE_OPERAND (arg, 1)))
2249 break;
2250 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2251
2252 case SAVE_EXPR:
2253 return build1 (TRUTH_NOT_EXPR, type, arg);
2254
2255 case CLEANUP_POINT_EXPR:
2256 return build1 (CLEANUP_POINT_EXPR, type,
2257 invert_truthvalue (TREE_OPERAND (arg, 0)));
2258
2259 default:
2260 break;
2261 }
2262 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2263 abort ();
2264 return build1 (TRUTH_NOT_EXPR, type, arg);
2265 }
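
/* Illustrative sketch (not compiled): the rewrites above follow the
   usual identities on truth values, e.g. De Morgan's laws; for the XOR
   case only one operand is inverted. Assumes A and B are truth values
   (0 or 1), as invert_truthvalue itself does. */
#if 0
#include <assert.h>

static void check_truth_inversions (int a, int b)
{
  a = !!a, b = !!b;			/* normalize to 0 or 1 */
  assert (!(a && b) == (!a || !b));	/* TRUTH_AND_EXPR -> TRUTH_OR_EXPR */
  assert (!(a || b) == (!a && !b));	/* TRUTH_OR_EXPR -> TRUTH_AND_EXPR */
  assert (!(a ^ b) == ((!a) ^ b));	/* TRUTH_XOR_EXPR: invert one side */
  assert (!!a == a);			/* TRUTH_NOT_EXPR of TRUTH_NOT_EXPR */
}
#endif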
2266
2267 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2268 operands are another bit-wise operation with a common input. If so,
2269 distribute the bit operations to save an operation and possibly two if
2270 constants are involved. For example, convert
2271 (A | B) & (A | C) into A | (B & C)
2272 Further simplification will occur if B and C are constants.
2273
2274 If this optimization cannot be done, 0 will be returned. */
2275
2276 static tree
2277 distribute_bit_expr (code, type, arg0, arg1)
2278 enum tree_code code;
2279 tree type;
2280 tree arg0, arg1;
2281 {
2282 tree common;
2283 tree left, right;
2284
2285 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2286 || TREE_CODE (arg0) == code
2287 || (TREE_CODE (arg0) != BIT_AND_EXPR
2288 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2289 return 0;
2290
2291 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2292 {
2293 common = TREE_OPERAND (arg0, 0);
2294 left = TREE_OPERAND (arg0, 1);
2295 right = TREE_OPERAND (arg1, 1);
2296 }
2297 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2298 {
2299 common = TREE_OPERAND (arg0, 0);
2300 left = TREE_OPERAND (arg0, 1);
2301 right = TREE_OPERAND (arg1, 0);
2302 }
2303 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2304 {
2305 common = TREE_OPERAND (arg0, 1);
2306 left = TREE_OPERAND (arg0, 0);
2307 right = TREE_OPERAND (arg1, 1);
2308 }
2309 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2310 {
2311 common = TREE_OPERAND (arg0, 1);
2312 left = TREE_OPERAND (arg0, 0);
2313 right = TREE_OPERAND (arg1, 0);
2314 }
2315 else
2316 return 0;
2317
2318 return fold (build (TREE_CODE (arg0), type, common,
2319 fold (build (code, type, left, right))));
2320 }
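
/* Quick sketch (not compiled) of the distributive identities relied on
   above, checked on plain unsigned words. */
#if 0
#include <assert.h>

static void check_distribute (unsigned a, unsigned b, unsigned c)
{
  assert (((a | b) & (a | c)) == (a | (b & c)));	/* the example above */
  assert (((a & b) | (a & c)) == (a & (b | c)));	/* the dual case */
}
#endif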
2321 \f
2322 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2323 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2324
2325 static tree
2326 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2327 tree inner;
2328 tree type;
2329 int bitsize, bitpos;
2330 int unsignedp;
2331 {
2332 tree result = build (BIT_FIELD_REF, type, inner,
2333 size_int (bitsize), size_int (bitpos));
2334
2335 TREE_UNSIGNED (result) = unsignedp;
2336
2337 return result;
2338 }
2339
2340 /* Optimize a bit-field compare.
2341
2342 There are two cases: First is a compare against a constant and the
2343 second is a comparison of two items where the fields are at the same
2344 bit position relative to the start of a chunk (byte, halfword, word)
2345 large enough to contain it. In these cases we can avoid the shift
2346 implicit in bitfield extractions.
2347
2348 For constants, we emit a compare of the shifted constant with the
2349 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2350 compared. For two fields at the same position, we do the ANDs with the
2351 similar mask and compare the result of the ANDs.
2352
2353 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2354 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2355 are the left and right operands of the comparison, respectively.
2356
2357 If the optimization described above can be done, we return the resulting
2358 tree. Otherwise we return zero. */
2359
2360 static tree
2361 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2362 enum tree_code code;
2363 tree compare_type;
2364 tree lhs, rhs;
2365 {
2366 int lbitpos, lbitsize, rbitpos, rbitsize;
2367 int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
2368 tree type = TREE_TYPE (lhs);
2369 tree signed_type, unsigned_type;
2370 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2371 enum machine_mode lmode, rmode, lnmode, rnmode;
2372 int lunsignedp, runsignedp;
2373 int lvolatilep = 0, rvolatilep = 0;
2374 int alignment;
2375 tree linner, rinner;
2376 tree mask;
2377 tree offset;
2378
2379 /* Get all the information about the extractions being done. If the bit size
2380 is the same as the size of the underlying object, we aren't doing an
2381 extraction at all and so can do nothing. */
2382 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2383 &lunsignedp, &lvolatilep, &alignment);
2384 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2385 || offset != 0)
2386 return 0;
2387
2388 if (!const_p)
2389 {
2390 /* If this is not a constant, we can only do something if bit positions,
2391 sizes, and signedness are the same. */
2392 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2393 &runsignedp, &rvolatilep, &alignment);
2394
2395 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2396 || lunsignedp != runsignedp || offset != 0)
2397 return 0;
2398 }
2399
2400 /* See if we can find a mode to refer to this field. We should be able to,
2401 but fail if we can't. */
2402 lnmode = get_best_mode (lbitsize, lbitpos,
2403 TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
2404 lvolatilep);
2405 if (lnmode == VOIDmode)
2406 return 0;
2407
2408 /* Set signed and unsigned types of the precision of this mode for the
2409 shifts below. */
2410 signed_type = type_for_mode (lnmode, 0);
2411 unsigned_type = type_for_mode (lnmode, 1);
2412
2413 if (! const_p)
2414 {
2415 rnmode = get_best_mode (rbitsize, rbitpos,
2416 TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
2417 rvolatilep);
2418 if (rnmode == VOIDmode)
2419 return 0;
2420 }
2421
2422 /* Compute the bit position and size for the new reference and our offset
2423 within it. If the new reference is the same size as the original, we
2424 won't optimize anything, so return zero. */
2425 lnbitsize = GET_MODE_BITSIZE (lnmode);
2426 lnbitpos = lbitpos & ~ (lnbitsize - 1);
2427 lbitpos -= lnbitpos;
2428 if (lnbitsize == lbitsize)
2429 return 0;
2430
2431 if (! const_p)
2432 {
2433 rnbitsize = GET_MODE_BITSIZE (rnmode);
2434 rnbitpos = rbitpos & ~ (rnbitsize - 1);
2435 rbitpos -= rnbitpos;
2436 if (rnbitsize == rbitsize)
2437 return 0;
2438 }
2439
2440 if (BYTES_BIG_ENDIAN)
2441 lbitpos = lnbitsize - lbitsize - lbitpos;
2442
2443 /* Make the mask to be used against the extracted field. */
2444 mask = build_int_2 (~0, ~0);
2445 TREE_TYPE (mask) = unsigned_type;
2446 force_fit_type (mask, 0);
2447 mask = convert (unsigned_type, mask);
2448 mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
2449 mask = const_binop (RSHIFT_EXPR, mask,
2450 size_int (lnbitsize - lbitsize - lbitpos), 0);
2451
2452 if (! const_p)
2453 /* If not comparing with a constant, just rework the comparison
2454 and return. */
2455 return build (code, compare_type,
2456 build (BIT_AND_EXPR, unsigned_type,
2457 make_bit_field_ref (linner, unsigned_type,
2458 lnbitsize, lnbitpos, 1),
2459 mask),
2460 build (BIT_AND_EXPR, unsigned_type,
2461 make_bit_field_ref (rinner, unsigned_type,
2462 rnbitsize, rnbitpos, 1),
2463 mask));
2464
2465 /* Otherwise, we are handling the constant case. See if the constant is too
2466 big for the field. Warn and return a tree for 0 (false) if so. We do
2467 this not only for its own sake, but to avoid having to test for this
2468 error case below. If we didn't, we might generate wrong code.
2469
2470 For unsigned fields, the constant shifted right by the field length should
2471 be all zero. For signed fields, the high-order bits should agree with
2472 the sign bit. */
2473
2474 if (lunsignedp)
2475 {
2476 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2477 convert (unsigned_type, rhs),
2478 size_int (lbitsize), 0)))
2479 {
2480 warning ("comparison is always %s due to width of bitfield",
2481 code == NE_EXPR ? "one" : "zero");
2482 return convert (compare_type,
2483 (code == NE_EXPR
2484 ? integer_one_node : integer_zero_node));
2485 }
2486 }
2487 else
2488 {
2489 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2490 size_int (lbitsize - 1), 0);
2491 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2492 {
2493 warning ("comparison is always %s due to width of bitfield",
2494 code == NE_EXPR ? "one" : "zero");
2495 return convert (compare_type,
2496 (code == NE_EXPR
2497 ? integer_one_node : integer_zero_node));
2498 }
2499 }
2500
2501 /* Single-bit compares should always be against zero. */
2502 if (lbitsize == 1 && ! integer_zerop (rhs))
2503 {
2504 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2505 rhs = convert (type, integer_zero_node);
2506 }
2507
2508 /* Make a new bitfield reference, shift the constant over the
2509 appropriate number of bits and mask it with the computed mask
2510 (in case this was a signed field). If we changed it, make a new one. */
2511 lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
2512 if (lvolatilep)
2513 {
2514 TREE_SIDE_EFFECTS (lhs) = 1;
2515 TREE_THIS_VOLATILE (lhs) = 1;
2516 }
2517
2518 rhs = fold (const_binop (BIT_AND_EXPR,
2519 const_binop (LSHIFT_EXPR,
2520 convert (unsigned_type, rhs),
2521 size_int (lbitpos), 0),
2522 mask, 0));
2523
2524 return build (code, compare_type,
2525 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2526 rhs);
2527 }
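
/* Sketch (not compiled) of the constant case above on a plain unsigned
   word: instead of extracting and shifting the field, mask the word and
   shift the constant. The field position and width here are arbitrary
   assumptions, and the constant is assumed to fit the field (the code
   above warns and folds to a constant when it does not). */
#if 0
#include <assert.h>

#define FPOS  3				/* bit position of the field */
#define FBITS 4				/* width of the field */

static int field_eq_extract (unsigned word, unsigned c)
{
  return ((word >> FPOS) & ((1u << FBITS) - 1)) == c;	/* shift per access */
}

static int field_eq_masked (unsigned word, unsigned c)
{
  unsigned mask = ((1u << FBITS) - 1) << FPOS;	/* mask as built above */
  return (word & mask) == (c << FPOS);		/* shift the constant once */
}
#endif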
2528 \f
2529 /* Subroutine for fold_truthop: decode a field reference.
2530
2531 If EXP is a comparison reference, we return the innermost reference.
2532
2533 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2534 set to the starting bit number.
2535
2536 If the innermost field can be completely contained in a mode-sized
2537 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2538
2539 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2540 otherwise it is not changed.
2541
2542 *PUNSIGNEDP is set to the signedness of the field.
2543
2544 *PMASK is set to the mask used. This is either contained in a
2545 BIT_AND_EXPR or derived from the width of the field.
2546
2547 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2548
2549 Return 0 if this is not a component reference or is one that we can't
2550 do anything with. */
2551
2552 static tree
2553 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2554 pvolatilep, pmask, pand_mask)
2555 tree exp;
2556 int *pbitsize, *pbitpos;
2557 enum machine_mode *pmode;
2558 int *punsignedp, *pvolatilep;
2559 tree *pmask;
2560 tree *pand_mask;
2561 {
2562 tree and_mask = 0;
2563 tree mask, inner, offset;
2564 tree unsigned_type;
2565 int precision;
2566 int alignment;
2567
2568 /* All the optimizations using this function assume integer fields.
2569 There are problems with FP fields since the type_for_size call
2570 below can fail for, e.g., XFmode. */
2571 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2572 return 0;
2573
2574 STRIP_NOPS (exp);
2575
2576 if (TREE_CODE (exp) == BIT_AND_EXPR)
2577 {
2578 and_mask = TREE_OPERAND (exp, 1);
2579 exp = TREE_OPERAND (exp, 0);
2580 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2581 if (TREE_CODE (and_mask) != INTEGER_CST)
2582 return 0;
2583 }
2584
2585
2586 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2587 punsignedp, pvolatilep, &alignment);
2588 if ((inner == exp && and_mask == 0)
2589 || *pbitsize < 0 || offset != 0)
2590 return 0;
2591
2592 /* Compute the mask to access the bitfield. */
2593 unsigned_type = type_for_size (*pbitsize, 1);
2594 precision = TYPE_PRECISION (unsigned_type);
2595
2596 mask = build_int_2 (~0, ~0);
2597 TREE_TYPE (mask) = unsigned_type;
2598 force_fit_type (mask, 0);
2599 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2600 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2601
2602 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2603 if (and_mask != 0)
2604 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2605 convert (unsigned_type, and_mask), mask));
2606
2607 *pmask = mask;
2608 *pand_mask = and_mask;
2609 return inner;
2610 }
2611
2612 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2613 bit positions. */
2614
2615 static int
2616 all_ones_mask_p (mask, size)
2617 tree mask;
2618 int size;
2619 {
2620 tree type = TREE_TYPE (mask);
2621 int precision = TYPE_PRECISION (type);
2622 tree tmask;
2623
2624 tmask = build_int_2 (~0, ~0);
2625 TREE_TYPE (tmask) = signed_type (type);
2626 force_fit_type (tmask, 0);
2627 return
2628 tree_int_cst_equal (mask,
2629 const_binop (RSHIFT_EXPR,
2630 const_binop (LSHIFT_EXPR, tmask,
2631 size_int (precision - size),
2632 0),
2633 size_int (precision - size), 0));
2634 }
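
/* The same test (not compiled) on a plain 32-bit unsigned word: a mask
   is SIZE low-order ones exactly when shifting ~0 left and then back
   right by (precision - SIZE) reproduces it. Valid for 1 <= size <= 32. */
#if 0
static int low_ones_mask_p (unsigned mask, int size)
{
  return mask == ((~0u << (32 - size)) >> (32 - size));
}
/* e.g. low_ones_mask_p (0xff, 8) is 1; low_ones_mask_p (0x7e, 8) is 0. */
#endif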
2635
2636 /* Subroutine for fold_truthop: determine if an operand is simple enough
2637 to be evaluated unconditionally. */
2638
2639 static int
2640 simple_operand_p (exp)
2641 tree exp;
2642 {
2643 /* Strip any conversions that don't change the machine mode. */
2644 while ((TREE_CODE (exp) == NOP_EXPR
2645 || TREE_CODE (exp) == CONVERT_EXPR)
2646 && (TYPE_MODE (TREE_TYPE (exp))
2647 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2648 exp = TREE_OPERAND (exp, 0);
2649
2650 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2651 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2652 && ! TREE_ADDRESSABLE (exp)
2653 && ! TREE_THIS_VOLATILE (exp)
2654 && ! DECL_NONLOCAL (exp)
2655 /* Don't regard global variables as simple. They may be
2656 allocated in ways unknown to the compiler (shared memory,
2657 #pragma weak, etc). */
2658 && ! TREE_PUBLIC (exp)
2659 && ! DECL_EXTERNAL (exp)
2660 /* Loading a static variable is unduly expensive, but global
2661 registers aren't expensive. */
2662 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2663 }
2664 \f
2665 /* The following functions are subroutines to fold_range_test and allow it to
2666 try to change a logical combination of comparisons into a range test.
2667
2668 For example, both
2669 X == 2 || X == 3 || X == 4 || X == 5
2670 and
2671 X >= 2 && X <= 5
2672 are converted to
2673 (unsigned) (X - 2) <= 3
2674
2675 We describe each set of comparisons as being either inside or outside
2676 a range, using a variable named like IN_P, and then describe the
2677 range with a lower and upper bound. If one of the bounds is omitted,
2678 it represents either the highest or lowest value of the type.
2679
2680 In the comments below, we represent a range by two numbers in brackets
2681 preceded by a "+" to designate being inside that range, or a "-" to
2682 designate being outside that range, so the condition can be inverted by
2683 flipping the prefix. An omitted bound is represented by a "-". For
2684 example, "- [-, 10]" means being outside the range starting at the lowest
2685 possible value and ending at 10, in other words, being greater than 10.
2686 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2687 always false.
2688
2689 We set up things so that the missing bounds are handled in a consistent
2690 manner so neither a missing bound nor "true" and "false" need to be
2691 handled using a special case. */
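
/* A concrete instance (not compiled) of the conversion above: all three
   forms below agree, modulo the usual caveat that X - 2 must not
   overflow in the signed type. */
#if 0
#include <assert.h>

static void check_range_forms (int x)
{
  int chain = (x == 2 || x == 3 || x == 4 || x == 5);
  int pair  = (x >= 2 && x <= 5);
  int range = ((unsigned) (x - 2) <= 3);
  assert (chain == pair && pair == range);
}
#endif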
2692
2693 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2694 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2695 and UPPER1_P are nonzero if the respective argument is an upper bound
2696 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2697 must be specified for a comparison. ARG1 will be converted to ARG0's
2698 type if both are specified. */
2699
2700 static tree
2701 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2702 enum tree_code code;
2703 tree type;
2704 tree arg0, arg1;
2705 int upper0_p, upper1_p;
2706 {
2707 tree tem;
2708 int result;
2709 int sgn0, sgn1;
2710
2711 /* If neither arg represents infinity, do the normal operation.
2712 Else, if not a comparison, return infinity. Else handle the special
2713 comparison rules. Note that most of the cases below won't occur, but
2714 are handled for consistency. */
2715
2716 if (arg0 != 0 && arg1 != 0)
2717 {
2718 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2719 arg0, convert (TREE_TYPE (arg0), arg1)));
2720 STRIP_NOPS (tem);
2721 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2722 }
2723
2724 if (TREE_CODE_CLASS (code) != '<')
2725 return 0;
2726
2727 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2728 for neither. Then compute our result treating them as never equal
2729 and comparing bounds to non-bounds as above. */
2730 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2731 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2732 switch (code)
2733 {
2734 case EQ_EXPR: case NE_EXPR:
2735 result = (code == NE_EXPR);
2736 break;
2737 case LT_EXPR: case LE_EXPR:
2738 result = sgn0 < sgn1;
2739 break;
2740 case GT_EXPR: case GE_EXPR:
2741 result = sgn0 > sgn1;
2742 break;
2743 default:
2744 abort ();
2745 }
2746
2747 return convert (type, result ? integer_one_node : integer_zero_node);
2748 }
2749 \f
2750 /* Given EXP, a logical expression, set the range it is testing into
2751 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2752 actually being tested. *PLOW and *PHIGH will be made the same type
2753 as the returned expression. If EXP is not a comparison, we will most
2754 likely not be returning a useful value and range. */
2755
2756 static tree
2757 make_range (exp, pin_p, plow, phigh)
2758 tree exp;
2759 int *pin_p;
2760 tree *plow, *phigh;
2761 {
2762 enum tree_code code;
2763 tree arg0, arg1, type;
2764 int in_p, n_in_p;
2765 tree low, high, n_low, n_high;
2766
2767 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2768 and see if we can refine the range. Some of the cases below may not
2769 happen, but it doesn't seem worth worrying about this. We "continue"
2770 the outer loop when we've changed something; otherwise we "break"
2771 the switch, which will "break" the while. */
2772
2773 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2774
2775 while (1)
2776 {
2777 code = TREE_CODE (exp);
2778 arg0 = TREE_OPERAND (exp, 0), arg1 = TREE_OPERAND (exp, 1);
2779 if (TREE_CODE_CLASS (code) == '<' || TREE_CODE_CLASS (code) == '1'
2780 || TREE_CODE_CLASS (code) == '2')
2781 type = TREE_TYPE (arg0);
2782
2783 switch (code)
2784 {
2785 case TRUTH_NOT_EXPR:
2786 in_p = ! in_p, exp = arg0;
2787 continue;
2788
2789 case EQ_EXPR: case NE_EXPR:
2790 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2791 /* We can only do something if the range is testing for zero
2792 and if the second operand is an integer constant. Note that
2793 saying something is "in" the range we make is done by
2794 complementing IN_P, since it is set in the initial case of
2795 being not equal to zero; "out" is leaving it alone. */
2796 if (low == 0 || high == 0
2797 || ! integer_zerop (low) || ! integer_zerop (high)
2798 || TREE_CODE (arg1) != INTEGER_CST)
2799 break;
2800
2801 switch (code)
2802 {
2803 case NE_EXPR: /* - [c, c] */
2804 low = high = arg1;
2805 break;
2806 case EQ_EXPR: /* + [c, c] */
2807 in_p = ! in_p, low = high = arg1;
2808 break;
2809 case GT_EXPR: /* - [-, c] */
2810 low = 0, high = arg1;
2811 break;
2812 case GE_EXPR: /* + [c, -] */
2813 in_p = ! in_p, low = arg1, high = 0;
2814 break;
2815 case LT_EXPR: /* - [c, -] */
2816 low = arg1, high = 0;
2817 break;
2818 case LE_EXPR: /* + [-, c] */
2819 in_p = ! in_p, low = 0, high = arg1;
2820 break;
2821 default:
2822 abort ();
2823 }
2824
2825 exp = arg0;
2826
2827 /* If this is an unsigned comparison, we also know that EXP is
2828 greater than or equal to zero. We base the range tests we make
2829 on that fact, so we record it here so we can parse existing
2830 range tests. */
2831 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
2832 {
2833 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
2834 1, convert (type, integer_zero_node),
2835 NULL_TREE))
2836 break;
2837
2838 in_p = n_in_p, low = n_low, high = n_high;
2839
2840 /* If the high bound is missing, reverse the range so it
2841 goes from zero to the low bound minus 1. */
2842 if (high == 0)
2843 {
2844 in_p = ! in_p;
2845 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
2846 integer_one_node, 0);
2847 low = convert (type, integer_zero_node);
2848 }
2849 }
2850 continue;
2851
2852 case NEGATE_EXPR:
2853 /* (-x) IN [a,b] -> x in [-b, -a] */
2854 n_low = range_binop (MINUS_EXPR, type,
2855 convert (type, integer_zero_node), 0, high, 1);
2856 n_high = range_binop (MINUS_EXPR, type,
2857 convert (type, integer_zero_node), 0, low, 0);
2858 low = n_low, high = n_high;
2859 exp = arg0;
2860 continue;
2861
2862 case BIT_NOT_EXPR:
2863 /* ~ X -> -X - 1 */
2864 exp = build (MINUS_EXPR, type, build1 (NEGATE_EXPR, type, arg0),
2865 convert (type, integer_one_node));
2866 continue;
2867
2868 case PLUS_EXPR: case MINUS_EXPR:
2869 if (TREE_CODE (arg1) != INTEGER_CST)
2870 break;
2871
2872 /* If EXP is signed, any overflow in the computation is undefined,
2873 so we don't worry about it so long as our computations on
2874 the bounds don't overflow. For unsigned, overflow is defined
2875 and this is exactly the right thing. */
2876 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2877 type, low, 0, arg1, 0);
2878 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2879 type, high, 1, arg1, 0);
2880 if ((n_low != 0 && TREE_OVERFLOW (n_low))
2881 || (n_high != 0 && TREE_OVERFLOW (n_high)))
2882 break;
2883
2884 /* Check for an unsigned range which has wrapped around the maximum
2885 value thus making n_high < n_low, and normalize it. */
2886 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
2887 {
2888 low = range_binop (PLUS_EXPR, type, n_high, 0,
2889 integer_one_node, 0);
2890 high = range_binop (MINUS_EXPR, type, n_low, 0,
2891 integer_one_node, 0);
2892 in_p = ! in_p;
2893 }
2894 else
2895 low = n_low, high = n_high;
2896
2897 exp = arg0;
2898 continue;
2899
2900 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
2901 if (! INTEGRAL_TYPE_P (type)
2902 || (low != 0 && ! int_fits_type_p (low, type))
2903 || (high != 0 && ! int_fits_type_p (high, type)))
2904 break;
2905
2906 n_low = low, n_high = high;
2907
2908 if (n_low != 0)
2909 n_low = convert (type, n_low);
2910
2911 if (n_high != 0)
2912 n_high = convert (type, n_high);
2913
2914 /* If we're converting from an unsigned to a signed type,
2915 we will be doing the comparison as unsigned. The tests above
2916 have already verified that LOW and HIGH are both positive.
2917
2918 So we have to make sure that the original unsigned value will
2919 be interpreted as positive. */
2920 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
2921 {
2922 tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
2923 tree high_positive
2924 = fold (build (RSHIFT_EXPR, type,
2925 convert (type,
2926 TYPE_MAX_VALUE (equiv_type)),
2927 convert (type, integer_one_node)));
2928
2929 /* If the low bound is specified, "and" the range with the
2930 range for which the original unsigned value will be
2931 positive. */
2932 if (low != 0)
2933 {
2934 if (! merge_ranges (&n_in_p, &n_low, &n_high,
2935 1, n_low, n_high,
2936 1, convert (type, integer_zero_node),
2937 high_positive))
2938 break;
2939
2940 in_p = (n_in_p == in_p);
2941 }
2942 else
2943 {
2944 /* Otherwise, "or" the range with the range of the input
2945 that will be interpreted as negative. */
2946 if (! merge_ranges (&n_in_p, &n_low, &n_high,
2947 0, n_low, n_high,
2948 1, convert (type, integer_zero_node),
2949 high_positive))
2950 break;
2951
2952 in_p = (in_p != n_in_p);
2953 }
2954 }
2955
2956 exp = arg0;
2957 low = n_low, high = n_high;
2958 continue;
2959
2960 default:
2961 break;
2962 }
2963
2964 break;
2965 }
2966
2967 /* If EXP is a constant, we can evaluate whether this is true or false. */
2968 if (TREE_CODE (exp) == INTEGER_CST)
2969 {
2970 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
2971 exp, 0, low, 0))
2972 && integer_onep (range_binop (LE_EXPR, integer_type_node,
2973 exp, 1, high, 1)));
2974 low = high = 0;
2975 exp = 0;
2976 }
2977
2978 *pin_p = in_p, *plow = low, *phigh = high;
2979 return exp;
2980 }
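
/* Sketch (not compiled) of the normalization performed above for the
   simple `x OP c' cases. A zero `has_*' field plays the role NULL_TREE
   plays above for a missing bound; the character encoding of the
   comparison codes is purely illustrative. */
#if 0
struct srange { int in_p, low, high, has_low, has_high; };

static struct srange range_of_compare (char op, int c)
{
  struct srange r = { 0, 0, 0, 1, 1 };
  switch (op)
    {
    case '!': r.in_p = 0, r.low = r.high = c; break;	    /* x != c: - [c, c] */
    case '=': r.in_p = 1, r.low = r.high = c; break;	    /* x == c: + [c, c] */
    case '>': r.in_p = 0, r.has_low = 0, r.high = c; break; /* x >  c: - [-, c] */
    case 'G': r.in_p = 1, r.low = c, r.has_high = 0; break; /* x >= c: + [c, -] */
    case '<': r.in_p = 0, r.low = c, r.has_high = 0; break; /* x <  c: - [c, -] */
    case 'L': r.in_p = 1, r.has_low = 0, r.high = c; break; /* x <= c: + [-, c] */
    }
  return r;
}
#endif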
2981 \f
2982 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
2983 type, TYPE, return an expression to test if EXP is in (or out of, depending
2984 on IN_P) the range. */
2985
2986 static tree
2987 build_range_check (type, exp, in_p, low, high)
2988 tree type;
2989 tree exp;
2990 int in_p;
2991 tree low, high;
2992 {
2993 tree etype = TREE_TYPE (exp);
2994 tree utype, value;
2995
2996 if (! in_p
2997 && (0 != (value = build_range_check (type, exp, 1, low, high))))
2998 return invert_truthvalue (value);
2999
3000 else if (low == 0 && high == 0)
3001 return convert (type, integer_one_node);
3002
3003 else if (low == 0)
3004 return fold (build (LE_EXPR, type, exp, high));
3005
3006 else if (high == 0)
3007 return fold (build (GE_EXPR, type, exp, low));
3008
3009 else if (operand_equal_p (low, high, 0))
3010 return fold (build (EQ_EXPR, type, exp, low));
3011
3012 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3013 return build_range_check (type, exp, 1, 0, high);
3014
3015 else if (integer_zerop (low))
3016 {
3017 utype = unsigned_type (etype);
3018 return build_range_check (type, convert (utype, exp), 1, 0,
3019 convert (utype, high));
3020 }
3021
3022 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3023 && ! TREE_OVERFLOW (value))
3024 return build_range_check (type,
3025 fold (build (MINUS_EXPR, etype, exp, low)),
3026 1, convert (etype, integer_zero_node), value);
3027 else
3028 return 0;
3029 }
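
/* The last case above, in concrete form (not compiled): for integer X,
   `low <= X && X <= high' is one unsigned comparison. Assumes
   low <= high and that X - low and high - low do not overflow. */
#if 0
#include <assert.h>

static void check_range_check (int x, int low, int high)
{
  assert ((x >= low && x <= high)
	  == ((unsigned) (x - low) <= (unsigned) (high - low)));
}
#endif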
3030 \f
3031 /* Given two ranges, see if we can merge them into one. Return 1 if we
3032 can, 0 if we can't. Set the output range into the specified parameters. */
3033
3034 static int
3035 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3036 int *pin_p;
3037 tree *plow, *phigh;
3038 int in0_p, in1_p;
3039 tree low0, high0, low1, high1;
3040 {
3041 int no_overlap;
3042 int subset;
3043 int temp;
3044 tree tem;
3045 int in_p;
3046 tree low, high;
3047 int lowequal = ((low0 == 0 && low1 == 0)
3048 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3049 low0, 0, low1, 0)));
3050 int highequal = ((high0 == 0 && high1 == 0)
3051 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3052 high0, 1, high1, 1)));
3053
3054 /* Make range 0 be the range that starts first, or ends last if they
3055 start at the same value. Swap them if that is not the case. */
3056 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3057 low0, 0, low1, 0))
3058 || (lowequal
3059 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3060 high1, 1, high0, 1))))
3061 {
3062 temp = in0_p, in0_p = in1_p, in1_p = temp;
3063 tem = low0, low0 = low1, low1 = tem;
3064 tem = high0, high0 = high1, high1 = tem;
3065 }
3066
3067 /* Now flag two cases, whether the ranges are disjoint or whether the
3068 second range is totally subsumed in the first. Note that the tests
3069 below are simplified by the ones above. */
3070 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3071 high0, 1, low1, 0));
3072 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3073 high1, 1, high0, 1));
3074
3075 /* We now have four cases, depending on whether we are including or
3076 excluding the two ranges. */
3077 if (in0_p && in1_p)
3078 {
3079 /* If they don't overlap, the result is false. If the second range
3080 is a subset it is the result. Otherwise, the range is from the start
3081 of the second to the end of the first. */
3082 if (no_overlap)
3083 in_p = 0, low = high = 0;
3084 else if (subset)
3085 in_p = 1, low = low1, high = high1;
3086 else
3087 in_p = 1, low = low1, high = high0;
3088 }
3089
3090 else if (in0_p && ! in1_p)
3091 {
3092 /* If they don't overlap, the result is the first range. If they are
3093 equal, the result is false. If the second range is a subset of the
3094 first, and the ranges begin at the same place, we go from just after
3095 the end of the first range to the end of the second. If the second
3096 range is not a subset of the first, or if it is a subset and both
3097 ranges end at the same place, the range starts at the start of the
3098 first range and ends just before the second range.
3099 Otherwise, we can't describe this as a single range. */
3100 if (no_overlap)
3101 in_p = 1, low = low0, high = high0;
3102 else if (lowequal && highequal)
3103 in_p = 0, low = high = 0;
3104 else if (subset && lowequal)
3105 {
3106 in_p = 1, high = high0;
3107 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3108 integer_one_node, 0);
3109 }
3110 else if (! subset || highequal)
3111 {
3112 in_p = 1, low = low0;
3113 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3114 integer_one_node, 0);
3115 }
3116 else
3117 return 0;
3118 }
3119
3120 else if (! in0_p && in1_p)
3121 {
3122 /* If they don't overlap, the result is the second range. If the second
3123 is a subset of the first, the result is false. Otherwise,
3124 the range starts just after the first range and ends at the
3125 end of the second. */
3126 if (no_overlap)
3127 in_p = 1, low = low1, high = high1;
3128 else if (subset)
3129 in_p = 0, low = high = 0;
3130 else
3131 {
3132 in_p = 1, high = high1;
3133 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3134 integer_one_node, 0);
3135 }
3136 }
3137
3138 else
3139 {
3140 /* The case where we are excluding both ranges. Here the complex case
3141 is if they don't overlap. In that case, the only time we have a
3142 range is if they are adjacent. If the second is a subset of the
3143 first, the result is the first. Otherwise, the range to exclude
3144 starts at the beginning of the first range and ends at the end of the
3145 second. */
3146 if (no_overlap)
3147 {
3148 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3149 range_binop (PLUS_EXPR, NULL_TREE,
3150 high0, 1,
3151 integer_one_node, 1),
3152 1, low1, 0)))
3153 in_p = 0, low = low0, high = high1;
3154 else
3155 return 0;
3156 }
3157 else if (subset)
3158 in_p = 0, low = low0, high = high0;
3159 else
3160 in_p = 0, low = low0, high = high1;
3161 }
3162
3163 *pin_p = in_p, *plow = low, *phigh = high;
3164 return 1;
3165 }
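
/* Sketch (not compiled) of the `in0_p && in1_p' case above, restricted
   to finite bounds and with range 0 already starting first, as arranged
   by the swap above. */
#if 0
static int intersect_ranges (int low0, int high0, int low1, int high1,
			     int *plow, int *phigh)
{
  if (high0 < low1)
    return 0;				/* disjoint: the result is false */
  *plow = low1;				/* start of the second range */
  *phigh = high1 <= high0 ? high1 : high0; /* subset, else end of the first */
  return 1;
}
#endif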
3166 \f
3167 /* EXP is some logical combination of boolean tests. See if we can
3168 merge it into some range test. Return the new tree if so. */
3169
3170 static tree
3171 fold_range_test (exp)
3172 tree exp;
3173 {
3174 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3175 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3176 int in0_p, in1_p, in_p;
3177 tree low0, low1, low, high0, high1, high;
3178 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3179 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3180 tree tem;
3181
3182 /* If this is an OR operation, invert both sides; we will invert
3183 again at the end. */
3184 if (or_op)
3185 in0_p = ! in0_p, in1_p = ! in1_p;
3186
3187 /* If both expressions are the same, if we can merge the ranges, and we
3188 can build the range test, return it or it inverted. If one of the
3189 ranges is always true or always false, consider it to be the same
3190 expression as the other. */
3191 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3192 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3193 in1_p, low1, high1)
3194 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3195 lhs != 0 ? lhs
3196 : rhs != 0 ? rhs : integer_zero_node,
3197 in_p, low, high))))
3198 return or_op ? invert_truthvalue (tem) : tem;
3199
3200 /* On machines where the branch cost is expensive, if this is a
3201 short-circuited branch and the underlying object on both sides
3202 is the same, make a non-short-circuit operation. */
3203 else if (BRANCH_COST >= 2
3204 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3205 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3206 && operand_equal_p (lhs, rhs, 0))
3207 {
3208 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3209 unless we are at top level, in which case we can't do this. */
3210 if (simple_operand_p (lhs))
3211 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3212 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3213 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3214 TREE_OPERAND (exp, 1));
3215
3216 else if (current_function_decl != 0)
3217 {
3218 tree common = save_expr (lhs);
3219
3220 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3221 or_op ? ! in0_p : in0_p,
3222 low0, high0))
3223 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3224 or_op ? ! in1_p : in1_p,
3225 low1, high1))))
3226 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3227 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3228 TREE_TYPE (exp), lhs, rhs);
3229 }
3230 }
3231
3232   return 0;
3233 }
3234 \f
3235 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3236 bit value. Arrange things so the extra bits will be set to zero if and
3237 only if C is signed-extended to its full width. If MASK is nonzero,
3238 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3239
3240 static tree
3241 unextend (c, p, unsignedp, mask)
3242 tree c;
3243 int p;
3244 int unsignedp;
3245 tree mask;
3246 {
3247 tree type = TREE_TYPE (c);
3248 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3249 tree temp;
3250
3251 if (p == modesize || unsignedp)
3252 return c;
3253
3254 /* We work by getting just the sign bit into the low-order bit, then
3255 into the high-order bit, then sign-extend. We then XOR that value
3256 with C. */
3257 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3258 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3259
3260 /* We must use a signed type in order to get an arithmetic right shift.
3261 However, we must also avoid introducing accidental overflows, so that
3262 a subsequent call to integer_zerop will work. Hence we must
3263 do the type conversion here. At this point, the constant is either
3264 zero or one, and the conversion to a signed type can never overflow.
3265 We could get an overflow if this conversion is done anywhere else. */
3266 if (TREE_UNSIGNED (type))
3267 temp = convert (signed_type (type), temp);
3268
3269 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3270 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3271 if (mask != 0)
3272 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3273 /* If necessary, convert the type back to match the type of C. */
3274 if (TREE_UNSIGNED (type))
3275 temp = convert (type, temp);
3276
3277 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3278 }
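
/* The sign-bit dance above in plain C (not compiled), for a P-bit value
   held in a 32-bit int with 1 <= p < 32; right shifts of negative
   values are assumed arithmetic, and the MASK step is omitted. The
   result has its upper bits clear exactly when C was sign-extended
   from P bits. */
#if 0
static int unextend_bits (int c, int p)
{
  int temp = (c >> (p - 1)) & 1;	  /* sign bit of the P-bit value */
  temp = (int) ((unsigned) temp << 31);	  /* into the high-order bit */
  temp >>= 31 - p;			  /* smear it across bits P..31 */
  return c ^ temp;			  /* flip the extension bits */
}
#endif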
3279 \f
3280 /* Find ways of folding logical expressions of LHS and RHS:
3281 Try to merge two comparisons to the same innermost item.
3282 Look for range tests like "ch >= '0' && ch <= '9'".
3283 Look for combinations of simple terms on machines with expensive branches
3284 and evaluate the RHS unconditionally.
3285
3286 For example, if we have p->a == 2 && p->b == 4 and we can make an
3287 object large enough to span both A and B, we can do this with a comparison
3288 against the object ANDed with a mask.
3289
3290 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3291 operations to do this with one comparison.
3292
3293 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3294 function and the one above.
3295
3296 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3297 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3298
3299 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
3300 two operands.
3301
3302 We return the simplified tree or 0 if no optimization is possible. */
3303
3304 static tree
3305 fold_truthop (code, truth_type, lhs, rhs)
3306 enum tree_code code;
3307 tree truth_type, lhs, rhs;
3308 {
3309 /* If this is the "or" of two comparisons, we can do something if
3310 the comparisons are NE_EXPR. If this is the "and", we can do something
3311 if the comparisons are EQ_EXPR. I.e.,
3312 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3313
3314 WANTED_CODE is the comparison code for this operation. For single bit fields, we can
3315 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3316 comparison for one-bit fields. */
3317
3318 enum tree_code wanted_code;
3319 enum tree_code lcode, rcode;
3320 tree ll_arg, lr_arg, rl_arg, rr_arg;
3321 tree ll_inner, lr_inner, rl_inner, rr_inner;
3322 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3323 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3324 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3325 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3326 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3327 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3328 enum machine_mode lnmode, rnmode;
3329 tree ll_mask, lr_mask, rl_mask, rr_mask;
3330 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3331 tree l_const, r_const;
3332 tree type, result;
3333 int first_bit, end_bit;
3334 int volatilep;
3335
3336 /* Start by getting the comparison codes. Fail if anything is volatile.
3337 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3338 it were surrounded with a NE_EXPR. */
3339
3340 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3341 return 0;
3342
3343 lcode = TREE_CODE (lhs);
3344 rcode = TREE_CODE (rhs);
3345
3346 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3347 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3348
3349 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3350 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3351
3352 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3353 return 0;
3354
3355 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3356 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3357
3358 ll_arg = TREE_OPERAND (lhs, 0);
3359 lr_arg = TREE_OPERAND (lhs, 1);
3360 rl_arg = TREE_OPERAND (rhs, 0);
3361 rr_arg = TREE_OPERAND (rhs, 1);
3362
3363 /* If the RHS can be evaluated unconditionally and its operands are
3364 simple, it wins to evaluate the RHS unconditionally on machines
3365 with expensive branches. In this case, this isn't a comparison
3366 that can be merged. */
3367
3368 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
3369 are with zero (tmw). */
3370
3371 if (BRANCH_COST >= 2
3372 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
3373 && simple_operand_p (rl_arg)
3374 && simple_operand_p (rr_arg))
3375 return build (code, truth_type, lhs, rhs);
3376
3377 /* See if the comparisons can be merged. Then get all the parameters for
3378 each side. */
3379
3380 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3381 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3382 return 0;
3383
3384 volatilep = 0;
3385 ll_inner = decode_field_reference (ll_arg,
3386 &ll_bitsize, &ll_bitpos, &ll_mode,
3387 &ll_unsignedp, &volatilep, &ll_mask,
3388 &ll_and_mask);
3389 lr_inner = decode_field_reference (lr_arg,
3390 &lr_bitsize, &lr_bitpos, &lr_mode,
3391 &lr_unsignedp, &volatilep, &lr_mask,
3392 &lr_and_mask);
3393 rl_inner = decode_field_reference (rl_arg,
3394 &rl_bitsize, &rl_bitpos, &rl_mode,
3395 &rl_unsignedp, &volatilep, &rl_mask,
3396 &rl_and_mask);
3397 rr_inner = decode_field_reference (rr_arg,
3398 &rr_bitsize, &rr_bitpos, &rr_mode,
3399 &rr_unsignedp, &volatilep, &rr_mask,
3400 &rr_and_mask);
3401
3402 /* The inner operation on the lhs of each comparison must be the
3403 same if we are to be able to do anything.
3404 Then see if we have constants. If not, the same must be true for
3405 the rhs's. */
3406 if (volatilep || ll_inner == 0 || rl_inner == 0
3407 || ! operand_equal_p (ll_inner, rl_inner, 0))
3408 return 0;
3409
3410 if (TREE_CODE (lr_arg) == INTEGER_CST
3411 && TREE_CODE (rr_arg) == INTEGER_CST)
3412 l_const = lr_arg, r_const = rr_arg;
3413 else if (lr_inner == 0 || rr_inner == 0
3414 || ! operand_equal_p (lr_inner, rr_inner, 0))
3415 return 0;
3416 else
3417 l_const = r_const = 0;
3418
3419 /* If either comparison code is not correct for our logical operation,
3420 fail. However, we can convert a one-bit comparison against zero into
3421 the opposite comparison against that bit being set in the field. */
3422
3423 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3424 if (lcode != wanted_code)
3425 {
3426 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3427 {
3428 if (ll_unsignedp || tree_log2 (ll_mask) + 1 < ll_bitsize)
3429 l_const = ll_mask;
3430 else
3431 /* Since ll_arg is a single-bit mask, we can sign-extend
3432 it appropriately with a NEGATE_EXPR.
3433 l_const is made a signed value here, but since for l_const != NULL
3434 lr_unsignedp is not used, we don't need to clear the latter. */
3435 l_const = fold (build1 (NEGATE_EXPR, TREE_TYPE (ll_arg),
3436 convert (TREE_TYPE (ll_arg), ll_mask)));
3437 }
3438 else
3439 return 0;
3440 }
3441
3442 if (rcode != wanted_code)
3443 {
3444 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3445 {
3446 if (rl_unsignedp || tree_log2 (rl_mask) + 1 < rl_bitsize)
3447 r_const = rl_mask;
3448 else
3449 /* This is analogous to the code for l_const above. */
3450 r_const = fold (build1 (NEGATE_EXPR, TREE_TYPE (rl_arg),
3451 convert (TREE_TYPE (rl_arg), rl_mask)));
3452 }
3453 else
3454 return 0;
3455 }
3456
3457 /* See if we can find a mode that contains both fields being compared on
3458 the left. If we can't, fail. Otherwise, update all constants and masks
3459 to be relative to a field of that size. */
3460 first_bit = MIN (ll_bitpos, rl_bitpos);
3461 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3462 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3463 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3464 volatilep);
3465 if (lnmode == VOIDmode)
3466 return 0;
3467
3468 lnbitsize = GET_MODE_BITSIZE (lnmode);
3469 lnbitpos = first_bit & ~ (lnbitsize - 1);
3470 type = type_for_size (lnbitsize, 1);
3471 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3472
3473 if (BYTES_BIG_ENDIAN)
3474 {
3475 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3476 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3477 }
3478
3479 ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
3480 size_int (xll_bitpos), 0);
3481 rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
3482 size_int (xrl_bitpos), 0);
3483
3484 if (l_const)
3485 {
3486 l_const = convert (type, l_const);
3487 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3488 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3489 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3490 fold (build1 (BIT_NOT_EXPR,
3491 type, ll_mask)),
3492 0)))
3493 {
3494 warning ("comparison is always %s",
3495 wanted_code == NE_EXPR ? "one" : "zero");
3496
3497 return convert (truth_type,
3498 wanted_code == NE_EXPR
3499 ? integer_one_node : integer_zero_node);
3500 }
3501 }
3502 if (r_const)
3503 {
3504 r_const = convert (type, r_const);
3505 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3506 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3507 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3508 fold (build1 (BIT_NOT_EXPR,
3509 type, rl_mask)),
3510 0)))
3511 {
3512 warning ("comparison is always %s",
3513 wanted_code == NE_EXPR ? "one" : "zero");
3514
3515 return convert (truth_type,
3516 wanted_code == NE_EXPR
3517 ? integer_one_node : integer_zero_node);
3518 }
3519 }
3520
3521 /* If the right sides are not constant, do the same for them. Also,
3522 disallow this optimization if a size or signedness mismatch occurs
3523 between the left and right sides. */
3524 if (l_const == 0)
3525 {
3526 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3527 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3528 /* Make sure the two fields on the right
3529 correspond to the left without being swapped. */
3530 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3531 return 0;
3532
3533 first_bit = MIN (lr_bitpos, rr_bitpos);
3534 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3535 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3536 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3537 volatilep);
3538 if (rnmode == VOIDmode)
3539 return 0;
3540
3541 rnbitsize = GET_MODE_BITSIZE (rnmode);
3542 rnbitpos = first_bit & ~ (rnbitsize - 1);
3543 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3544
3545 if (BYTES_BIG_ENDIAN)
3546 {
3547 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3548 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3549 }
3550
3551 lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
3552 size_int (xlr_bitpos), 0);
3553 rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
3554 size_int (xrr_bitpos), 0);
3555
3556 /* Make a mask that corresponds to both fields being compared.
3557 Do this for both items being compared. If the masks agree,
3558 we can do this by masking both and comparing the masked
3559 results. */
3560 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3561 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3562 if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
3563 {
3564 lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3565 ll_unsignedp || rl_unsignedp);
3566 rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
3567 lr_unsignedp || rr_unsignedp);
3568 if (! all_ones_mask_p (ll_mask, lnbitsize))
3569 {
3570 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3571 rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
3572 }
3573 return build (wanted_code, truth_type, lhs, rhs);
3574 }
3575
3576 /* There is still another way we can do something: If both pairs of
3577 fields being compared are adjacent, we may be able to make a wider
3578 field containing them both. */
3579 if ((ll_bitsize + ll_bitpos == rl_bitpos
3580 && lr_bitsize + lr_bitpos == rr_bitpos)
3581 || (ll_bitpos == rl_bitpos + rl_bitsize
3582 && lr_bitpos == rr_bitpos + rr_bitsize))
3583 return build (wanted_code, truth_type,
3584 make_bit_field_ref (ll_inner, type,
3585 ll_bitsize + rl_bitsize,
3586 MIN (ll_bitpos, rl_bitpos),
3587 ll_unsignedp),
3588 make_bit_field_ref (lr_inner, type,
3589 lr_bitsize + rr_bitsize,
3590 MIN (lr_bitpos, rr_bitpos),
3591 lr_unsignedp));
3592
3593 return 0;
3594 }
3595
3596 /* Handle the case of comparisons with constants. If there is something in
3597 common between the masks, those bits of the constants must be the same.
3598 If not, the condition is always false. Test for this to avoid generating
3599 incorrect code below. */
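/* For instance, `(x & 3) == 1 && (x & 5) == 4' would require bit 0 of X
   to be both 1 and 0, so the `and' folds to zero; likewise
   `(x & 3) != 1 || (x & 5) != 4' folds to one.  */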
3600 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3601 if (! integer_zerop (result)
3602 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3603 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3604 {
3605 if (wanted_code == NE_EXPR)
3606 {
3607 warning ("`or' of unmatched not-equal tests is always 1");
3608 return convert (truth_type, integer_one_node);
3609 }
3610 else
3611 {
3612 warning ("`and' of mutually exclusive equal-tests is always zero");
3613 return convert (truth_type, integer_zero_node);
3614 }
3615 }
3616
3617 /* Construct the expression we will return. First get the component
3618 reference we will make. Unless the mask is all ones the width of
3619 that field, perform the mask operation. Then compare with the
3620 merged constant. */
3621 result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3622 ll_unsignedp || rl_unsignedp);
3623
3624 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3625 if (! all_ones_mask_p (ll_mask, lnbitsize))
3626 result = build (BIT_AND_EXPR, type, result, ll_mask);
3627
3628 return build (wanted_code, truth_type, result,
3629 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3630 }
3631 \f
3632 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
3633 S, a SAVE_EXPR, return the expression actually being evaluated. Note
3634 that we may sometimes modify the tree. */
3635
3636 static tree
3637 strip_compound_expr (t, s)
3638 tree t;
3639 tree s;
3640 {
3641 tree type = TREE_TYPE (t);
3642 enum tree_code code = TREE_CODE (t);
3643
3644 /* See if this is the COMPOUND_EXPR we want to eliminate. */
3645 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
3646 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
3647 return TREE_OPERAND (t, 1);
3648
3649 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3650 don't bother handling any other types. */
3651 else if (code == COND_EXPR)
3652 {
3653 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3654 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3655 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
3656 }
3657 else if (TREE_CODE_CLASS (code) == '1')
3658 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3659 else if (TREE_CODE_CLASS (code) == '<'
3660 || TREE_CODE_CLASS (code) == '2')
3661 {
3662 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3663 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3664 }
3665
3666 return t;
3667 }
3668 \f
3669 /* Perform constant folding and related simplification of EXPR.
3670 The related simplifications include x*1 => x, x*0 => 0, etc.,
3671 and application of the associative law.
3672 NOP_EXPR conversions may be removed freely (as long as we
3673 are careful not to change the C type of the overall expression).
3674 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3675 but we can constant-fold them if they have constant operands. */
3676
3677 tree
3678 fold (expr)
3679 tree expr;
3680 {
3681 register tree t = expr;
3682 tree t1 = NULL_TREE;
3683 tree tem;
3684 tree type = TREE_TYPE (expr);
3685 register tree arg0, arg1;
3686 register enum tree_code code = TREE_CODE (t);
3687 register int kind;
3688 int invert;
3689
3690 /* WINS will be nonzero when the switch is done
3691 if all operands are constant. */
3692
3693 int wins = 1;
3694
3695 /* Don't try to process an RTL_EXPR since its operands aren't trees.
3696 Likewise for a SAVE_EXPR that's already been evaluated. */
3697 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
3698 return t;
3699
3700 /* Return right away if already constant. */
3701 if (TREE_CONSTANT (t))
3702 {
3703 if (code == CONST_DECL)
3704 return DECL_INITIAL (t);
3705 return t;
3706 }
3707
3708 kind = TREE_CODE_CLASS (code);
3709 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
3710 {
3711 tree subop;
3712
3713 /* Special case for conversion ops that can have fixed point args. */
3714 arg0 = TREE_OPERAND (t, 0);
3715
3716 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3717 if (arg0 != 0)
3718 STRIP_TYPE_NOPS (arg0);
3719
3720 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
3721 subop = TREE_REALPART (arg0);
3722 else
3723 subop = arg0;
3724
3725 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
3726 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3727 && TREE_CODE (subop) != REAL_CST
3728 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3729 )
3730 /* Note that TREE_CONSTANT isn't enough:
3731 static var addresses are constant but we can't
3732 do arithmetic on them. */
3733 wins = 0;
3734 }
3735 else if (kind == 'e' || kind == '<'
3736 || kind == '1' || kind == '2' || kind == 'r')
3737 {
3738 register int len = tree_code_length[(int) code];
3739 register int i;
3740 for (i = 0; i < len; i++)
3741 {
3742 tree op = TREE_OPERAND (t, i);
3743 tree subop;
3744
3745 if (op == 0)
3746 continue; /* Valid for CALL_EXPR, at least. */
3747
3748 if (kind == '<' || code == RSHIFT_EXPR)
3749 {
3750 /* Signedness matters here. Perhaps we can refine this
3751 later. */
3752 STRIP_TYPE_NOPS (op);
3753 }
3754 else
3755 {
3756 /* Strip any conversions that don't change the mode. */
3757 STRIP_NOPS (op);
3758 }
3759
3760 if (TREE_CODE (op) == COMPLEX_CST)
3761 subop = TREE_REALPART (op);
3762 else
3763 subop = op;
3764
3765 if (TREE_CODE (subop) != INTEGER_CST
3766 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3767 && TREE_CODE (subop) != REAL_CST
3768 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3769 )
3770 /* Note that TREE_CONSTANT isn't enough:
3771 static var addresses are constant but we can't
3772 do arithmetic on them. */
3773 wins = 0;
3774
3775 if (i == 0)
3776 arg0 = op;
3777 else if (i == 1)
3778 arg1 = op;
3779 }
3780 }
3781
3782 /* If this is a commutative operation, and ARG0 is a constant, move it
3783 to ARG1 to reduce the number of tests below. */
3784 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
3785 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
3786 || code == BIT_AND_EXPR)
3787 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
3788 {
3789 tem = arg0; arg0 = arg1; arg1 = tem;
3790
3791 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
3792 TREE_OPERAND (t, 1) = tem;
3793 }
3794
3795 /* Now WINS is set as described above,
3796 ARG0 is the first operand of EXPR,
3797 and ARG1 is the second operand (if it has more than one operand).
3798
3799 First check for cases where an arithmetic operation is applied to a
3800 compound, conditional, or comparison operation. Push the arithmetic
3801 operation inside the compound or conditional to see if any folding
3802 can then be done. Convert comparison to conditional for this purpose.
3803 This also optimizes non-constant cases that used to be done in
3804 expand_expr.
3805
3806 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
3807 where one of the operands is a truth value and the other is a truth
3808 value or a BIT_AND_EXPR with the constant 1. In that case, the
3809 code below would make the expression more complex. Change it to a
3810 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3811 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
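/* For example, `(a < b) & (c < d)' becomes a TRUTH_AND_EXPR, and
   `(a < b) == (c < d)' becomes the inversion of a TRUTH_XOR_EXPR,
   so the truth-value simplifications below can apply.  */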
3812
3813 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
3814 || code == EQ_EXPR || code == NE_EXPR)
3815 && ((truth_value_p (TREE_CODE (arg0))
3816 && (truth_value_p (TREE_CODE (arg1))
3817 || (TREE_CODE (arg1) == BIT_AND_EXPR
3818 && integer_onep (TREE_OPERAND (arg1, 1)))))
3819 || (truth_value_p (TREE_CODE (arg1))
3820 && (truth_value_p (TREE_CODE (arg0))
3821 || (TREE_CODE (arg0) == BIT_AND_EXPR
3822 && integer_onep (TREE_OPERAND (arg0, 1)))))))
3823 {
3824 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
3825 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
3826 : TRUTH_XOR_EXPR,
3827 type, arg0, arg1));
3828
3829 if (code == EQ_EXPR)
3830 t = invert_truthvalue (t);
3831
3832 return t;
3833 }
3834
3835 if (TREE_CODE_CLASS (code) == '1')
3836 {
3837 if (TREE_CODE (arg0) == COMPOUND_EXPR)
3838 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3839 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
3840 else if (TREE_CODE (arg0) == COND_EXPR)
3841 {
3842 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
3843 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
3844 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
3845
3846 /* If this was a conversion, and all we did was to move it
3847 inside the COND_EXPR, bring it back out. But leave it if
3848 it is a conversion from integer to integer and the
3849 result precision is no wider than a word since such a
3850 conversion is cheap and may be optimized away by combine,
3851 while it couldn't if it were outside the COND_EXPR. Then return
3852 so we don't get into an infinite recursion loop taking the
3853 conversion out and then back in. */
3854
3855 if ((code == NOP_EXPR || code == CONVERT_EXPR
3856 || code == NON_LVALUE_EXPR)
3857 && TREE_CODE (t) == COND_EXPR
3858 && TREE_CODE (TREE_OPERAND (t, 1)) == code
3859 && TREE_CODE (TREE_OPERAND (t, 2)) == code
3860 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
3861 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
3862 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
3863 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)))
3864 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
3865 t = build1 (code, type,
3866 build (COND_EXPR,
3867 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
3868 TREE_OPERAND (t, 0),
3869 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
3870 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
3871 return t;
3872 }
3873 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3874 return fold (build (COND_EXPR, type, arg0,
3875 fold (build1 (code, type, integer_one_node)),
3876 fold (build1 (code, type, integer_zero_node))));
3877 }
3878 else if (TREE_CODE_CLASS (code) == '2'
3879 || TREE_CODE_CLASS (code) == '<')
3880 {
3881 if (TREE_CODE (arg1) == COMPOUND_EXPR)
3882 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3883 fold (build (code, type,
3884 arg0, TREE_OPERAND (arg1, 1))));
3885 else if ((TREE_CODE (arg1) == COND_EXPR
3886 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
3887 && TREE_CODE_CLASS (code) != '<'))
3888 && (! TREE_SIDE_EFFECTS (arg0) || current_function_decl != 0))
3889 {
3890 tree test, true_value, false_value;
3891
3892 if (TREE_CODE (arg1) == COND_EXPR)
3893 {
3894 test = TREE_OPERAND (arg1, 0);
3895 true_value = TREE_OPERAND (arg1, 1);
3896 false_value = TREE_OPERAND (arg1, 2);
3897 }
3898 else
3899 {
3900 tree testtype = TREE_TYPE (arg1);
3901 test = arg1;
3902 true_value = convert (testtype, integer_one_node);
3903 false_value = convert (testtype, integer_zero_node);
3904 }
3905
3906 /* If ARG0 is complex we want to make sure we only evaluate
3907 it once. Though this is only required if it is volatile, it
3908 might be more efficient even if it is not. However, if we
3909 succeed in folding one part to a constant, we do not need
3910 to make this SAVE_EXPR. Since we do this optimization
3911 primarily to see if we end up with a constant, and this
3912 SAVE_EXPR interferes with later optimizations, suppressing
3913 it when we can is important. */
3914
3915 if (TREE_CODE (arg0) != SAVE_EXPR
3916 && ((TREE_CODE (arg0) != VAR_DECL
3917 && TREE_CODE (arg0) != PARM_DECL)
3918 || TREE_SIDE_EFFECTS (arg0)))
3919 {
3920 tree lhs = fold (build (code, type, arg0, true_value));
3921 tree rhs = fold (build (code, type, arg0, false_value));
3922
3923 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3924 return fold (build (COND_EXPR, type, test, lhs, rhs));
3925
3926 if (current_function_decl != 0)
3927 arg0 = save_expr (arg0);
3928 }
3929
3930 test = fold (build (COND_EXPR, type, test,
3931 fold (build (code, type, arg0, true_value)),
3932 fold (build (code, type, arg0, false_value))));
3933 if (TREE_CODE (arg0) == SAVE_EXPR)
3934 return build (COMPOUND_EXPR, type,
3935 convert (void_type_node, arg0),
3936 strip_compound_expr (test, arg0));
3937 else
3938 return convert (type, test);
3939 }
3940
3941 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
3942 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3943 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3944 else if ((TREE_CODE (arg0) == COND_EXPR
3945 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
3946 && TREE_CODE_CLASS (code) != '<'))
3947 && (! TREE_SIDE_EFFECTS (arg1) || current_function_decl != 0))
3948 {
3949 tree test, true_value, false_value;
3950
3951 if (TREE_CODE (arg0) == COND_EXPR)
3952 {
3953 test = TREE_OPERAND (arg0, 0);
3954 true_value = TREE_OPERAND (arg0, 1);
3955 false_value = TREE_OPERAND (arg0, 2);
3956 }
3957 else
3958 {
3959 tree testtype = TREE_TYPE (arg0);
3960 test = arg0;
3961 true_value = convert (testtype, integer_one_node);
3962 false_value = convert (testtype, integer_zero_node);
3963 }
3964
3965 if (TREE_CODE (arg1) != SAVE_EXPR
3966 && ((TREE_CODE (arg1) != VAR_DECL
3967 && TREE_CODE (arg1) != PARM_DECL)
3968 || TREE_SIDE_EFFECTS (arg1)))
3969 {
3970 tree lhs = fold (build (code, type, true_value, arg1));
3971 tree rhs = fold (build (code, type, false_value, arg1));
3972
3973 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs)
3974 || TREE_CONSTANT (arg1))
3975 return fold (build (COND_EXPR, type, test, lhs, rhs));
3976
3977 if (current_function_decl != 0)
3978 arg1 = save_expr (arg1);
3979 }
3980
3981 test = fold (build (COND_EXPR, type, test,
3982 fold (build (code, type, true_value, arg1)),
3983 fold (build (code, type, false_value, arg1))));
3984 if (TREE_CODE (arg1) == SAVE_EXPR)
3985 return build (COMPOUND_EXPR, type,
3986 convert (void_type_node, arg1),
3987 strip_compound_expr (test, arg1));
3988 else
3989 return convert (type, test);
3990 }
3991 }
3992 else if (TREE_CODE_CLASS (code) == '<'
3993 && TREE_CODE (arg0) == COMPOUND_EXPR)
3994 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3995 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3996 else if (TREE_CODE_CLASS (code) == '<'
3997 && TREE_CODE (arg1) == COMPOUND_EXPR)
3998 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3999 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4000
4001 switch (code)
4002 {
4003 case INTEGER_CST:
4004 case REAL_CST:
4005 case STRING_CST:
4006 case COMPLEX_CST:
4007 case CONSTRUCTOR:
4008 return t;
4009
4010 case CONST_DECL:
4011 return fold (DECL_INITIAL (t));
4012
4013 case NOP_EXPR:
4014 case FLOAT_EXPR:
4015 case CONVERT_EXPR:
4016 case FIX_TRUNC_EXPR:
4017 /* Other kinds of FIX are not handled properly by fold_convert. */
4018
4019 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4020 return TREE_OPERAND (t, 0);
4021
4022 /* Handle cases of two conversions in a row. */
4023 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4024 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4025 {
4026 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4027 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4028 tree final_type = TREE_TYPE (t);
4029 int inside_int = INTEGRAL_TYPE_P (inside_type);
4030 int inside_ptr = POINTER_TYPE_P (inside_type);
4031 int inside_float = FLOAT_TYPE_P (inside_type);
4032 int inside_prec = TYPE_PRECISION (inside_type);
4033 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4034 int inter_int = INTEGRAL_TYPE_P (inter_type);
4035 int inter_ptr = POINTER_TYPE_P (inter_type);
4036 int inter_float = FLOAT_TYPE_P (inter_type);
4037 int inter_prec = TYPE_PRECISION (inter_type);
4038 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4039 int final_int = INTEGRAL_TYPE_P (final_type);
4040 int final_ptr = POINTER_TYPE_P (final_type);
4041 int final_float = FLOAT_TYPE_P (final_type);
4042 int final_prec = TYPE_PRECISION (final_type);
4043 int final_unsignedp = TREE_UNSIGNED (final_type);
4044
4045 /* In addition to the cases of two conversions in a row
4046 handled below, if we are converting something to its own
4047 type via an object of identical or wider precision, neither
4048 conversion is needed. */
4049 if (inside_type == final_type
4050 && ((inter_int && final_int) || (inter_float && final_float))
4051 && inter_prec >= final_prec)
4052 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4053
4054 /* Likewise, if the intermediate and final types are either both
4055 float or both integer, we don't need the middle conversion if
4056 it is at least as wide as the initial type and doesn't change the signedness
4057 (for integers). Avoid this if the final type is a pointer
4058 since then we sometimes need the inner conversion. Likewise if
4059 the outer has a precision not equal to the size of its mode. */
4060 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4061 || (inter_float && inside_float))
4062 && inter_prec >= inside_prec
4063 && (inter_float || inter_unsignedp == inside_unsignedp)
4064 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4065 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4066 && ! final_ptr)
4067 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4068
4069 /* Two conversions in a row are not needed unless:
4070 - some conversion is floating-point (overstrict for now), or
4071 - the intermediate type is narrower than both initial and
4072 final, or
4073 - the intermediate type and innermost type differ in signedness,
4074 and the outermost type is wider than the intermediate, or
4075 - the initial type is a pointer type and the precisions of the
4076 intermediate and final types differ, or
4077 - the final type is a pointer type and the precisions of the
4078 initial and intermediate types differ. */
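/* For example, with the usual precisions (char narrower than int, int
   no wider than long), `(int) (long) c' for a `signed char' C collapses
   to `(int) c', and `(signed char) (long) c' collapses to C itself.  */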
4079 if (! inside_float && ! inter_float && ! final_float
4080 && (inter_prec > inside_prec || inter_prec > final_prec)
4081 && ! (inside_int && inter_int
4082 && inter_unsignedp != inside_unsignedp
4083 && inter_prec < final_prec)
4084 && ((inter_unsignedp && inter_prec > inside_prec)
4085 == (final_unsignedp && final_prec > inter_prec))
4086 && ! (inside_ptr && inter_prec != final_prec)
4087 && ! (final_ptr && inside_prec != inter_prec)
4088 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4089 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4090 && ! final_ptr)
4091 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4092 }
4093
4094 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4095 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4096 /* Detect assigning a bitfield. */
4097 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4098 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4099 {
4100 /* Don't leave an assignment inside a conversion
4101 unless assigning a bitfield. */
4102 tree prev = TREE_OPERAND (t, 0);
4103 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4104 /* First do the assignment, then return converted constant. */
4105 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4106 TREE_USED (t) = 1;
4107 return t;
4108 }
4109 if (!wins)
4110 {
4111 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4112 return t;
4113 }
4114 return fold_convert (t, arg0);
4115
4116 #if 0 /* This loses on &"foo"[0]. */
4117 case ARRAY_REF:
4118 {
4119 int i;
4120
4121 /* Fold an expression like: "foo"[2] */
4122 if (TREE_CODE (arg0) == STRING_CST
4123 && TREE_CODE (arg1) == INTEGER_CST
4124 && !TREE_INT_CST_HIGH (arg1)
4125 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
4126 {
4127 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
4128 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
4129 force_fit_type (t, 0);
4130 }
4131 }
4132 return t;
4133 #endif /* 0 */
4134
4135 case COMPONENT_REF:
4136 if (TREE_CODE (arg0) == CONSTRUCTOR)
4137 {
4138 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4139 if (m)
4140 t = TREE_VALUE (m);
4141 }
4142 return t;
4143
4144 case RANGE_EXPR:
4145 TREE_CONSTANT (t) = wins;
4146 return t;
4147
4148 case NEGATE_EXPR:
4149 if (wins)
4150 {
4151 if (TREE_CODE (arg0) == INTEGER_CST)
4152 {
4153 HOST_WIDE_INT low, high;
4154 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4155 TREE_INT_CST_HIGH (arg0),
4156 &low, &high);
4157 t = build_int_2 (low, high);
4158 TREE_TYPE (t) = type;
4159 TREE_OVERFLOW (t)
4160 = (TREE_OVERFLOW (arg0)
4161 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
4162 TREE_CONSTANT_OVERFLOW (t)
4163 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4164 }
4165 else if (TREE_CODE (arg0) == REAL_CST)
4166 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4167 }
4168 else if (TREE_CODE (arg0) == NEGATE_EXPR)
4169 return TREE_OPERAND (arg0, 0);
4170
4171 /* Convert - (a - b) to (b - a) for non-floating-point. */
4172 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
4173 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
4174 TREE_OPERAND (arg0, 0));
4175
4176 return t;
4177
4178 case ABS_EXPR:
4179 if (wins)
4180 {
4181 if (TREE_CODE (arg0) == INTEGER_CST)
4182 {
4183 if (! TREE_UNSIGNED (type)
4184 && TREE_INT_CST_HIGH (arg0) < 0)
4185 {
4186 HOST_WIDE_INT low, high;
4187 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4188 TREE_INT_CST_HIGH (arg0),
4189 &low, &high);
4190 t = build_int_2 (low, high);
4191 TREE_TYPE (t) = type;
4192 TREE_OVERFLOW (t)
4193 = (TREE_OVERFLOW (arg0)
4194 | force_fit_type (t, overflow));
4195 TREE_CONSTANT_OVERFLOW (t)
4196 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4197 }
4198 }
4199 else if (TREE_CODE (arg0) == REAL_CST)
4200 {
4201 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
4202 t = build_real (type,
4203 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4204 }
4205 }
4206 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
4207 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
4208 return t;
4209
4210 case CONJ_EXPR:
4211 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4212 return arg0;
4213 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4214 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
4215 TREE_OPERAND (arg0, 0),
4216 fold (build1 (NEGATE_EXPR,
4217 TREE_TYPE (TREE_TYPE (arg0)),
4218 TREE_OPERAND (arg0, 1))));
4219 else if (TREE_CODE (arg0) == COMPLEX_CST)
4220 return build_complex (type, TREE_OPERAND (arg0, 0),
4221 fold (build1 (NEGATE_EXPR,
4222 TREE_TYPE (TREE_TYPE (arg0)),
4223 TREE_OPERAND (arg0, 1))));
4224 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4225 return fold (build (TREE_CODE (arg0), type,
4226 fold (build1 (CONJ_EXPR, type,
4227 TREE_OPERAND (arg0, 0))),
4228 fold (build1 (CONJ_EXPR,
4229 type, TREE_OPERAND (arg0, 1)))));
4230 else if (TREE_CODE (arg0) == CONJ_EXPR)
4231 return TREE_OPERAND (arg0, 0);
4232 return t;
4233
4234 case BIT_NOT_EXPR:
4235 if (wins)
4236 {
4237 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
4238 ~ TREE_INT_CST_HIGH (arg0));
4239 TREE_TYPE (t) = type;
4240 force_fit_type (t, 0);
4241 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
4242 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
4243 }
4244 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
4245 return TREE_OPERAND (arg0, 0);
4246 return t;
4247
4248 case PLUS_EXPR:
4249 /* A + (-B) -> A - B */
4250 if (TREE_CODE (arg1) == NEGATE_EXPR)
4251 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4252 else if (! FLOAT_TYPE_P (type))
4253 {
4254 if (integer_zerop (arg1))
4255 return non_lvalue (convert (type, arg0));
4256
4257 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
4258 with a constant, and the two constants have no bits in common,
4259 we should treat this as a BIT_IOR_EXPR since this may produce more
4260 simplifications. */
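/* For example, `(a & 0xF0) + (b & 0x0F)' is handled as
   `(a & 0xF0) | (b & 0x0F)', since the masked values cannot
   carry into each other's bits.  */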
4261 if (TREE_CODE (arg0) == BIT_AND_EXPR
4262 && TREE_CODE (arg1) == BIT_AND_EXPR
4263 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4264 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
4265 && integer_zerop (const_binop (BIT_AND_EXPR,
4266 TREE_OPERAND (arg0, 1),
4267 TREE_OPERAND (arg1, 1), 0)))
4268 {
4269 code = BIT_IOR_EXPR;
4270 goto bit_ior;
4271 }
4272
4273 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
4274 about the case where C is a constant, just try one of the
4275 four possibilities. */
4276
4277 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
4278 && operand_equal_p (TREE_OPERAND (arg0, 1),
4279 TREE_OPERAND (arg1, 1), 0))
4280 return fold (build (MULT_EXPR, type,
4281 fold (build (PLUS_EXPR, type,
4282 TREE_OPERAND (arg0, 0),
4283 TREE_OPERAND (arg1, 0))),
4284 TREE_OPERAND (arg0, 1)));
4285 }
4286 /* In IEEE floating point, x+0 may not equal x. */
4287 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4288 || flag_fast_math)
4289 && real_zerop (arg1))
4290 return non_lvalue (convert (type, arg0));
4291 associate:
4292 /* In most languages, we can't associate operations on floats
4293 through parentheses. Rather than remember where the parentheses
4294 were, we don't associate floats at all. It shouldn't matter much.
4295 However, associating multiplications is only very slightly
4296 inaccurate, so do that if -ffast-math is specified. */
4297 if (FLOAT_TYPE_P (type)
4298 && ! (flag_fast_math && code == MULT_EXPR))
4299 goto binary;
4300
4301 /* The varsign == -1 case happens only for addition and subtraction.
4302 It means that the arg that was split was really CON minus VAR.
4303 The rest of the code applies to all associative operations. */
4304 if (!wins)
4305 {
4306 tree var, con;
4307 int varsign;
4308
4309 if (split_tree (arg0, code, &var, &con, &varsign))
4310 {
4311 if (varsign == -1)
4312 {
4313 /* EXPR is (CON-VAR) +- ARG1. */
4314 /* If it is + and VAR==ARG1, return just CON. */
4315 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
4316 return convert (TREE_TYPE (t), con);
4317
4318 /* If ARG0 is a constant, don't change things around;
4319 instead keep all the constant computations together. */
4320
4321 if (TREE_CONSTANT (arg0))
4322 return t;
4323
4324 /* Otherwise return (CON +- ARG1) - VAR. */
4325 t = build (MINUS_EXPR, type,
4326 fold (build (code, type, con, arg1)), var);
4327 }
4328 else
4329 {
4330 /* EXPR is (VAR+CON) +- ARG1. */
4331 /* If it is - and VAR==ARG1, return just CON. */
4332 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
4333 return convert (TREE_TYPE (t), con);
4334
4335 /* If ARG0 is a constant, don't change things around;
4336 instead keep all the constant computations together. */
4337
4338 if (TREE_CONSTANT (arg0))
4339 return t;
4340
4341 /* Otherwise return VAR +- (ARG1 +- CON). */
4342 tem = fold (build (code, type, arg1, con));
4343 t = build (code, type, var, tem);
4344
4345 if (integer_zerop (tem)
4346 && (code == PLUS_EXPR || code == MINUS_EXPR))
4347 return convert (type, var);
4348 /* If we have x +/- (c - d) [c an explicit integer]
4349 change it to x -/+ (d - c) since if d is relocatable
4350 then the latter can be a single immediate insn
4351 and the former cannot. */
4352 if (TREE_CODE (tem) == MINUS_EXPR
4353 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
4354 {
4355 tree tem1 = TREE_OPERAND (tem, 1);
4356 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
4357 TREE_OPERAND (tem, 0) = tem1;
4358 TREE_SET_CODE (t,
4359 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
4360 }
4361 }
4362 return t;
4363 }
4364
4365 if (split_tree (arg1, code, &var, &con, &varsign))
4366 {
4367 if (TREE_CONSTANT (arg1))
4368 return t;
4369
4370 if (varsign == -1)
4371 TREE_SET_CODE (t,
4372 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
4373
4374 /* EXPR is ARG0 +- (CON +- VAR). */
4375 if (TREE_CODE (t) == MINUS_EXPR
4376 && operand_equal_p (var, arg0, 0))
4377 {
4378 /* If VAR and ARG0 cancel, return just CON or -CON. */
4379 if (code == PLUS_EXPR)
4380 return convert (TREE_TYPE (t), con);
4381 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
4382 convert (TREE_TYPE (t), con)));
4383 }
4384
4385 t = build (TREE_CODE (t), type,
4386 fold (build (code, TREE_TYPE (t), arg0, con)), var);
4387
4388 if (integer_zerop (TREE_OPERAND (t, 0))
4389 && TREE_CODE (t) == PLUS_EXPR)
4390 return convert (TREE_TYPE (t), var);
4391 return t;
4392 }
4393 }
4394 binary:
4395 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
4396 if (TREE_CODE (arg1) == REAL_CST)
4397 return t;
4398 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
4399 if (wins)
4400 t1 = const_binop (code, arg0, arg1, 0);
4401 if (t1 != NULL_TREE)
4402 {
4403 /* The return value should always have
4404 the same type as the original expression. */
4405 if (TREE_TYPE (t1) != TREE_TYPE (t))
4406 t1 = convert (TREE_TYPE (t), t1);
4407
4408 return t1;
4409 }
4410 return t;
4411
4412 case MINUS_EXPR:
4413 if (! FLOAT_TYPE_P (type))
4414 {
4415 if (! wins && integer_zerop (arg0))
4416 return build1 (NEGATE_EXPR, type, arg1);
4417 if (integer_zerop (arg1))
4418 return non_lvalue (convert (type, arg0));
4419
4420 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
4421 about the case where C is a constant, just try one of the
4422 four possibilities. */
4423
4424 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
4425 && operand_equal_p (TREE_OPERAND (arg0, 1),
4426 TREE_OPERAND (arg1, 1), 0))
4427 return fold (build (MULT_EXPR, type,
4428 fold (build (MINUS_EXPR, type,
4429 TREE_OPERAND (arg0, 0),
4430 TREE_OPERAND (arg1, 0))),
4431 TREE_OPERAND (arg0, 1)));
4432 }
4433 /* Convert A - (-B) to A + B. */
4434 else if (TREE_CODE (arg1) == NEGATE_EXPR)
4435 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4436
4437 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4438 || flag_fast_math)
4439 {
4440 /* Except with IEEE floating point, 0-x equals -x. */
4441 if (! wins && real_zerop (arg0))
4442 return build1 (NEGATE_EXPR, type, arg1);
4443 /* Except with IEEE floating point, x-0 equals x. */
4444 if (real_zerop (arg1))
4445 return non_lvalue (convert (type, arg0));
4446 }
4447
4448 /* Fold &x - &x. This can happen from &x.foo - &x.
4449 This is unsafe for certain floats even in non-IEEE formats.
4450 In IEEE, it is unsafe because it gives the wrong result for NaNs.
4451 Also note that operand_equal_p is always false if an operand
4452 is volatile. */
4453
4454 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
4455 && operand_equal_p (arg0, arg1, 0))
4456 return convert (type, integer_zero_node);
4457
4458 goto associate;
4459
4460 case MULT_EXPR:
4461 if (! FLOAT_TYPE_P (type))
4462 {
4463 if (integer_zerop (arg1))
4464 return omit_one_operand (type, arg1, arg0);
4465 if (integer_onep (arg1))
4466 return non_lvalue (convert (type, arg0));
4467
4468 /* ((A / C) * C) is A if the division is an
4469 EXACT_DIV_EXPR. Since C is normally a constant,
4470 just check for one of the four possibilities. */
4471
4472 if (TREE_CODE (arg0) == EXACT_DIV_EXPR
4473 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
4474 return TREE_OPERAND (arg0, 0);
4475
4476 /* (a * (1 << b)) is (a << b) */
4477 if (TREE_CODE (arg1) == LSHIFT_EXPR
4478 && integer_onep (TREE_OPERAND (arg1, 0)))
4479 return fold (build (LSHIFT_EXPR, type, arg0,
4480 TREE_OPERAND (arg1, 1)));
4481 if (TREE_CODE (arg0) == LSHIFT_EXPR
4482 && integer_onep (TREE_OPERAND (arg0, 0)))
4483 return fold (build (LSHIFT_EXPR, type, arg1,
4484 TREE_OPERAND (arg0, 1)));
4485 }
4486 else
4487 {
4488 /* x*0 is 0, except for IEEE floating point. */
4489 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4490 || flag_fast_math)
4491 && real_zerop (arg1))
4492 return omit_one_operand (type, arg1, arg0);
4493 /* In IEEE floating point, x*1 is not equivalent to x for snans.
4494 However, ANSI says we can drop signals,
4495 so we can do this anyway. */
4496 if (real_onep (arg1))
4497 return non_lvalue (convert (type, arg0));
4498 /* x*2 is x+x */
4499 if (! wins && real_twop (arg1) && current_function_decl != 0)
4500 {
4501 tree arg = save_expr (arg0);
4502 return build (PLUS_EXPR, type, arg, arg);
4503 }
4504 }
4505 goto associate;
4506
4507 case BIT_IOR_EXPR:
4508 bit_ior:
4509 {
4510 register enum tree_code code0, code1;
4511
4512 if (integer_all_onesp (arg1))
4513 return omit_one_operand (type, arg1, arg0);
4514 if (integer_zerop (arg1))
4515 return non_lvalue (convert (type, arg0));
4516 t1 = distribute_bit_expr (code, type, arg0, arg1);
4517 if (t1 != NULL_TREE)
4518 return t1;
4519
4520 /* If A is unsigned and C1+C2 is the size of A, then
4521 (A << C1) | (A >> C2) is a rotate of A by C1 bits. */
4522 /* Likewise, if A is unsigned and Z is the size of A, then
4523 (A << B) | (A >> (Z - B)) is a rotate of A by B bits. */
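/* For example, assuming X is a 32-bit `unsigned int', both
   `(x << 3) | (x >> 29)' and `(x << n) | (x >> (32 - n))' are
   recognized here as left rotates of X.  */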
4524
4525 code0 = TREE_CODE (arg0);
4526 code1 = TREE_CODE (arg1);
4527 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
4528 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
4529 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
4530 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4531 {
4532 register tree tree01, tree11;
4533 register enum tree_code code01, code11;
4534
4535 tree01 = TREE_OPERAND (arg0, 1);
4536 tree11 = TREE_OPERAND (arg1, 1);
4537 code01 = TREE_CODE (tree01);
4538 code11 = TREE_CODE (tree11);
4539 if (code01 == INTEGER_CST
4540 && code11 == INTEGER_CST
4541 && TREE_INT_CST_HIGH (tree01) == 0
4542 && TREE_INT_CST_HIGH (tree11) == 0
4543 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
4544 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
4545 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
4546 code0 == LSHIFT_EXPR ? tree01 : tree11);
4547 else if (code11 == MINUS_EXPR
4548 && TREE_CODE (TREE_OPERAND (tree11, 0)) == INTEGER_CST
4549 && TREE_INT_CST_HIGH (TREE_OPERAND (tree11, 0)) == 0
4550 && TREE_INT_CST_LOW (TREE_OPERAND (tree11, 0))
4551 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4552 && operand_equal_p (tree01, TREE_OPERAND (tree11, 1), 0))
4553 return build (code0 == LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
4554 type, TREE_OPERAND (arg0, 0), tree01);
4555 else if (code01 == MINUS_EXPR
4556 && TREE_CODE (TREE_OPERAND (tree01, 0)) == INTEGER_CST
4557 && TREE_INT_CST_HIGH (TREE_OPERAND (tree01, 0)) == 0
4558 && TREE_INT_CST_LOW (TREE_OPERAND (tree01, 0))
4559 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4560 && operand_equal_p (tree11, TREE_OPERAND (tree01, 1), 0))
4561 return build (code0 != LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
4562 type, TREE_OPERAND (arg0, 0), tree11);
4563 }
4564
4565 goto associate;
4566 }
4567
4568 case BIT_XOR_EXPR:
4569 if (integer_zerop (arg1))
4570 return non_lvalue (convert (type, arg0));
4571 if (integer_all_onesp (arg1))
4572 return fold (build1 (BIT_NOT_EXPR, type, arg0));
4573 goto associate;
4574
4575 case BIT_AND_EXPR:
4576 bit_and:
4577 if (integer_all_onesp (arg1))
4578 return non_lvalue (convert (type, arg0));
4579 if (integer_zerop (arg1))
4580 return omit_one_operand (type, arg1, arg0);
4581 t1 = distribute_bit_expr (code, type, arg0, arg1);
4582 if (t1 != NULL_TREE)
4583 return t1;
4584 /* Simplify ((int)c & 0377) into (int)c, if c is an unsigned char. */
4585 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
4586 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
4587 {
4588 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
4589 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4590 && (~TREE_INT_CST_LOW (arg0)
4591 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4592 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
4593 }
4594 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
4595 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4596 {
4597 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
4598 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4599 && (~TREE_INT_CST_LOW (arg1)
4600 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4601 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
4602 }
4603 goto associate;
4604
4605 case BIT_ANDTC_EXPR:
4606 if (integer_all_onesp (arg0))
4607 return non_lvalue (convert (type, arg1));
4608 if (integer_zerop (arg0))
4609 return omit_one_operand (type, arg0, arg1);
4610 if (TREE_CODE (arg1) == INTEGER_CST)
4611 {
4612 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
4613 code = BIT_AND_EXPR;
4614 goto bit_and;
4615 }
4616 goto binary;
4617
4618 case RDIV_EXPR:
4619 /* In most cases, do nothing with a divide by zero. */
4620 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4621 #ifndef REAL_INFINITY
4622 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
4623 return t;
4624 #endif
4625 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4626
4627 /* In IEEE floating point, x/1 is not equivalent to x for snans.
4628 However, ANSI says we can drop signals, so we can do this anyway. */
4629 if (real_onep (arg1))
4630 return non_lvalue (convert (type, arg0));
4631
4632 /* If ARG1 is a constant, we can convert this to a multiply by the
4633 reciprocal. This does not have the same rounding properties,
4634 so only do this if -ffast-math. We can actually always safely
4635 do it if ARG1 is a power of two, but it's hard to tell if it is
4636 or not in a portable manner. */
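/* For example, with -ffast-math `x / 5.0' becomes `x * 0.2'; even
   without it, `x / 4.0' becomes `x * 0.25' when optimizing, since
   0.25 is an exact reciprocal.  */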
4637 if (TREE_CODE (arg1) == REAL_CST)
4638 {
4639 if (flag_fast_math
4640 && 0 != (tem = const_binop (code, build_real (type, dconst1),
4641 arg1, 0)))
4642 return fold (build (MULT_EXPR, type, arg0, tem));
4643 /* Find the reciprocal if optimizing and the result is exact. */
4644 else if (optimize)
4645 {
4646 REAL_VALUE_TYPE r;
4647 r = TREE_REAL_CST (arg1);
4648 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
4649 {
4650 tem = build_real (type, r);
4651 return fold (build (MULT_EXPR, type, arg0, tem));
4652 }
4653 }
4654 }
4655 goto binary;
4656
4657 case TRUNC_DIV_EXPR:
4658 case ROUND_DIV_EXPR:
4659 case FLOOR_DIV_EXPR:
4660 case CEIL_DIV_EXPR:
4661 case EXACT_DIV_EXPR:
4662 if (integer_onep (arg1))
4663 return non_lvalue (convert (type, arg0));
4664 if (integer_zerop (arg1))
4665 return t;
4666
4667 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
4668 operation, EXACT_DIV_EXPR.
4669
4670 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
4671 At one time others generated faster code; it's not clear if they do
4672 after the last round of changes to the DIV code in expmed.c. */
4673 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
4674 && multiple_of_p (type, arg0, arg1))
4675 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
4676
4677 /* If we have ((a / C1) / C2) where both divisions are the same type, try
4678 to simplify. First see if C1 * C2 overflows or not. */
4679 if (TREE_CODE (arg0) == code && TREE_CODE (arg1) == INTEGER_CST
4680 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4681 {
4682 tree new_divisor;
4683
4684 new_divisor = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 1), arg1, 0);
4685 tem = const_binop (FLOOR_DIV_EXPR, new_divisor, arg1, 0);
4686
4687 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_LOW (tem)
4688 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_HIGH (tem))
4689 {
4690 /* If no overflow, divide by C1*C2. */
4691 return fold (build (code, type, TREE_OPERAND (arg0, 0), new_divisor));
4692 }
4693 }
4694
4695 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
4696 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
4697 expressions, which often appear in the offsets or sizes of
4698 objects with a varying size. Only deal with positive divisors
4699 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
4700
4701 Look for NOPs and SAVE_EXPRs inside. */
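/* For example, `(a * 12 + 8) / 4' simplifies to `a * 3 + 2' here,
   since 12 % 4 == 0 and the addend 8 is nonnegative.  */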
4702
4703 if (TREE_CODE (arg1) == INTEGER_CST
4704 && tree_int_cst_sgn (arg1) >= 0)
4705 {
4706 int have_save_expr = 0;
4707 tree c2 = integer_zero_node;
4708 tree xarg0 = arg0;
4709
4710 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4711 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4712
4713 STRIP_NOPS (xarg0);
4714
4715 /* Look inside the dividend and simplify using EXACT_DIV_EXPR
4716 if possible. */
4717 if (TREE_CODE (xarg0) == MULT_EXPR
4718 && multiple_of_p (type, TREE_OPERAND (xarg0, 0), arg1))
4719 {
4720 tree t;
4721
4722 t = fold (build (MULT_EXPR, type,
4723 fold (build (EXACT_DIV_EXPR, type,
4724 TREE_OPERAND (xarg0, 0), arg1)),
4725 TREE_OPERAND (xarg0, 1)));
4726 if (have_save_expr)
4727 t = save_expr (t);
4728 return t;
4729
4730 }
4731
4732 if (TREE_CODE (xarg0) == MULT_EXPR
4733 && multiple_of_p (type, TREE_OPERAND (xarg0, 1), arg1))
4734 {
4735 tree t;
4736
4737 t = fold (build (MULT_EXPR, type,
4738 fold (build (EXACT_DIV_EXPR, type,
4739 TREE_OPERAND (xarg0, 1), arg1)),
4740 TREE_OPERAND (xarg0, 0)));
4741 if (have_save_expr)
4742 t = save_expr (t);
4743 return t;
4744 }
4745
4746 if (TREE_CODE (xarg0) == PLUS_EXPR
4747 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4748 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4749 else if (TREE_CODE (xarg0) == MINUS_EXPR
4750 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4751 /* If we are doing this computation unsigned, the negate
4752 is incorrect. */
4753 && ! TREE_UNSIGNED (type))
4754 {
4755 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4756 xarg0 = TREE_OPERAND (xarg0, 0);
4757 }
4758
4759 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4760 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4761
4762 STRIP_NOPS (xarg0);
4763
4764 if (TREE_CODE (xarg0) == MULT_EXPR
4765 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4766 && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
4767 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
4768 TREE_OPERAND (xarg0, 1), arg1, 1))
4769 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
4770 TREE_OPERAND (xarg0, 1), 1)))
4771 && (tree_int_cst_sgn (c2) >= 0
4772 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
4773 arg1, 1))))
4774 {
4775 tree outer_div = integer_one_node;
4776 tree c1 = TREE_OPERAND (xarg0, 1);
4777 tree c3 = arg1;
4778
4779 /* If C3 > C1, set them equal and do a divide by
4780 C3/C1 at the end of the operation. */
4781 if (tree_int_cst_lt (c1, c3))
4782 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
4783
4784 /* The result is A * (C1/C3) + (C2/C3). */
4785 t = fold (build (PLUS_EXPR, type,
4786 fold (build (MULT_EXPR, type,
4787 TREE_OPERAND (xarg0, 0),
4788 const_binop (code, c1, c3, 1))),
4789 const_binop (code, c2, c3, 1)));
4790
4791 if (! integer_onep (outer_div))
4792 t = fold (build (code, type, t, convert (type, outer_div)));
4793
4794 if (have_save_expr)
4795 t = save_expr (t);
4796
4797 return t;
4798 }
4799 }
4800
4801 goto binary;
4802
4803 case CEIL_MOD_EXPR:
4804 case FLOOR_MOD_EXPR:
4805 case ROUND_MOD_EXPR:
4806 case TRUNC_MOD_EXPR:
4807 if (integer_onep (arg1))
4808 return omit_one_operand (type, integer_zero_node, arg0);
4809 if (integer_zerop (arg1))
4810 return t;
4811
4812 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4813 where C1 % C3 == 0. Handle similarly to the division case,
4814 but don't bother with SAVE_EXPRs. */
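/* For example, `(a * 12 + 5) % 4' simplifies to `5 % 4', i.e. 1,
   with A still evaluated for its side effects.  */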
4815
4816 if (TREE_CODE (arg1) == INTEGER_CST
4817 && ! integer_zerop (arg1))
4818 {
4819 tree c2 = integer_zero_node;
4820 tree xarg0 = arg0;
4821
4822 if (TREE_CODE (xarg0) == PLUS_EXPR
4823 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4824 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4825 else if (TREE_CODE (xarg0) == MINUS_EXPR
4826 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4827 && ! TREE_UNSIGNED (type))
4828 {
4829 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4830 xarg0 = TREE_OPERAND (xarg0, 0);
4831 }
4832
4833 STRIP_NOPS (xarg0);
4834
4835 if (TREE_CODE (xarg0) == MULT_EXPR
4836 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4837 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
4838 TREE_OPERAND (xarg0, 1),
4839 arg1, 1))
4840 && tree_int_cst_sgn (c2) >= 0)
4841 /* The result is (C2%C3). */
4842 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
4843 TREE_OPERAND (xarg0, 0));
4844 }
4845
4846 goto binary;
4847
4848 case LSHIFT_EXPR:
4849 case RSHIFT_EXPR:
4850 case LROTATE_EXPR:
4851 case RROTATE_EXPR:
4852 if (integer_zerop (arg1))
4853 return non_lvalue (convert (type, arg0));
4854 /* Since a negative shift count is not well-defined,
4855 don't try to compute it in the compiler. */
4856 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
4857 return t;
4858 /* Rewrite an LROTATE_EXPR by a constant into an
4859 RROTATE_EXPR by a new constant. */
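/* For example, on a 32-bit type, a rotate left by 5 is rewritten
   as a rotate right by 27.  */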
4860 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
4861 {
4862 TREE_SET_CODE (t, RROTATE_EXPR);
4863 code = RROTATE_EXPR;
4864 TREE_OPERAND (t, 1) = arg1
4865 = const_binop
4866 (MINUS_EXPR,
4867 convert (TREE_TYPE (arg1),
4868 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
4869 arg1, 0);
4870 if (tree_int_cst_sgn (arg1) < 0)
4871 return t;
4872 }
4873
4874 /* If we have a rotate of a bit operation with the rotate count and
4875 the second operand of the bit operation both constant,
4876 permute the two operations. */
4877 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4878 && (TREE_CODE (arg0) == BIT_AND_EXPR
4879 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
4880 || TREE_CODE (arg0) == BIT_IOR_EXPR
4881 || TREE_CODE (arg0) == BIT_XOR_EXPR)
4882 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4883 return fold (build (TREE_CODE (arg0), type,
4884 fold (build (code, type,
4885 TREE_OPERAND (arg0, 0), arg1)),
4886 fold (build (code, type,
4887 TREE_OPERAND (arg0, 1), arg1))));
4888
4889 /* Two consecutive rotates adding up to the width of the mode can
4890 be ignored. */
4891 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4892 && TREE_CODE (arg0) == RROTATE_EXPR
4893 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4894 && TREE_INT_CST_HIGH (arg1) == 0
4895 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
4896 && ((TREE_INT_CST_LOW (arg1)
4897 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
4898 == GET_MODE_BITSIZE (TYPE_MODE (type))))
4899 return TREE_OPERAND (arg0, 0);
4900
4901 goto binary;
4902
4903 case MIN_EXPR:
4904 if (operand_equal_p (arg0, arg1, 0))
4905 return arg0;
4906 if (INTEGRAL_TYPE_P (type)
4907 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
4908 return omit_one_operand (type, arg1, arg0);
4909 goto associate;
4910
4911 case MAX_EXPR:
4912 if (operand_equal_p (arg0, arg1, 0))
4913 return arg0;
4914 if (INTEGRAL_TYPE_P (type)
4915 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
4916 return omit_one_operand (type, arg1, arg0);
4917 goto associate;
4918
4919 case TRUTH_NOT_EXPR:
4920 /* Note that the operand of this must be an int
4921 and its value must be 0 or 1.
4922 ("true" is a fixed value perhaps depending on the language,
4923 but we don't handle values other than 1 correctly yet.) */
4924 tem = invert_truthvalue (arg0);
4925 /* Avoid infinite recursion. */
4926 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
4927 return t;
4928 return convert (type, tem);
4929
4930 case TRUTH_ANDIF_EXPR:
4931 /* Note that the operands of this must be ints
4932 and their values must be 0 or 1.
4933 ("true" is a fixed value perhaps depending on the language.) */
4934 /* If first arg is constant zero, return it. */
4935 if (integer_zerop (arg0))
4936 return arg0;
4937 case TRUTH_AND_EXPR:
4938 /* If either arg is constant true, drop it. */
4939 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4940 return non_lvalue (arg1);
4941 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4942 return non_lvalue (arg0);
4943 /* If second arg is constant zero, result is zero, but first arg
4944 must be evaluated. */
4945 if (integer_zerop (arg1))
4946 return omit_one_operand (type, arg1, arg0);
4947 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
4948 case will be handled here. */
4949 if (integer_zerop (arg0))
4950 return omit_one_operand (type, arg0, arg1);
4951
4952 truth_andor:
4953 /* We only do these simplifications if we are optimizing. */
4954 if (!optimize)
4955 return t;
4956
4957 /* Check for things like (A || B) && (A || C). We can convert this
4958 to A || (B && C). Note that either operator can be any of the four
4959 truth and/or operations and the transformation will still be
4960 valid. Also note that we only care about order for the
4961 ANDIF and ORIF operators: if B contains side effects, the
4962 transformation might change the truth-value of A. */
4963 if (TREE_CODE (arg0) == TREE_CODE (arg1)
4964 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
4965 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
4966 || TREE_CODE (arg0) == TRUTH_AND_EXPR
4967 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
4968 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
4969 {
4970 tree a00 = TREE_OPERAND (arg0, 0);
4971 tree a01 = TREE_OPERAND (arg0, 1);
4972 tree a10 = TREE_OPERAND (arg1, 0);
4973 tree a11 = TREE_OPERAND (arg1, 1);
4974 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
4975 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
4976 && (code == TRUTH_AND_EXPR
4977 || code == TRUTH_OR_EXPR));
4978
4979 if (operand_equal_p (a00, a10, 0))
4980 return fold (build (TREE_CODE (arg0), type, a00,
4981 fold (build (code, type, a01, a11))));
4982 else if (commutative && operand_equal_p (a00, a11, 0))
4983 return fold (build (TREE_CODE (arg0), type, a00,
4984 fold (build (code, type, a01, a10))));
4985 else if (commutative && operand_equal_p (a01, a10, 0))
4986 return fold (build (TREE_CODE (arg0), type, a01,
4987 fold (build (code, type, a00, a11))));
4988
4989 /* This case is tricky because we must either have commutative
4990 operators or else A10 must not have side-effects. */
4991
4992 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
4993 && operand_equal_p (a01, a11, 0))
4994 return fold (build (TREE_CODE (arg0), type,
4995 fold (build (code, type, a00, a10)),
4996 a01));
4997 }
4998
4999 /* See if we can build a range comparison. */
5000 if (0 != (tem = fold_range_test (t)))
5001 return tem;
5002
5003 /* Check for the possibility of merging component references. If our
5004 lhs is another similar operation, try to merge its rhs with our
5005 rhs. Then try to merge our lhs and rhs. */
5006 if (TREE_CODE (arg0) == code
5007 && 0 != (tem = fold_truthop (code, type,
5008 TREE_OPERAND (arg0, 1), arg1)))
5009 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5010
5011 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5012 return tem;
5013
5014 return t;
5015
5016 case TRUTH_ORIF_EXPR:
5017 /* Note that the operands of this must be ints
5018 and their values must be 0 or 1.
5019 ("true" is a fixed value perhaps depending on the language.) */
5020 /* If first arg is constant true, return it. */
5021 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5022 return arg0;
5023 case TRUTH_OR_EXPR:
5024 /* If either arg is constant zero, drop it. */
5025 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5026 return non_lvalue (arg1);
5027 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
5028 return non_lvalue (arg0);
5029 /* If second arg is constant true, result is true, but we must
5030 evaluate first arg. */
5031 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5032 return omit_one_operand (type, arg1, arg0);
5033 /* Likewise for first arg, but note this only occurs here for
5034 TRUTH_OR_EXPR. */
5035 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5036 return omit_one_operand (type, arg0, arg1);
5037 goto truth_andor;
5038
5039 case TRUTH_XOR_EXPR:
5040 /* If either arg is constant zero, drop it. */
5041 if (integer_zerop (arg0))
5042 return non_lvalue (arg1);
5043 if (integer_zerop (arg1))
5044 return non_lvalue (arg0);
5045 /* If either arg is constant true, this is a logical inversion. */
5046 if (integer_onep (arg0))
5047 return non_lvalue (invert_truthvalue (arg1));
5048 if (integer_onep (arg1))
5049 return non_lvalue (invert_truthvalue (arg0));
5050 return t;
5051
5052 case EQ_EXPR:
5053 case NE_EXPR:
5054 case LT_EXPR:
5055 case GT_EXPR:
5056 case LE_EXPR:
5057 case GE_EXPR:
5058 /* If one arg is a constant integer, put it last. */
5059 if (TREE_CODE (arg0) == INTEGER_CST
5060 && TREE_CODE (arg1) != INTEGER_CST)
5061 {
5062 TREE_OPERAND (t, 0) = arg1;
5063 TREE_OPERAND (t, 1) = arg0;
5064 arg0 = TREE_OPERAND (t, 0);
5065 arg1 = TREE_OPERAND (t, 1);
5066 code = swap_tree_comparison (code);
5067 TREE_SET_CODE (t, code);
5068 }
5069
5070 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5071 First, see if one arg is constant; find the constant arg
5072 and the other one. */
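/* For example, `i++ == 5' becomes `++i == 6', subject to the
   validity checks below.  */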
5073 {
5074 tree constop = 0, varop;
5075 int constopnum = -1;
5076
5077 if (TREE_CONSTANT (arg1))
5078 constopnum = 1, constop = arg1, varop = arg0;
5079 if (TREE_CONSTANT (arg0))
5080 constopnum = 0, constop = arg0, varop = arg1;
5081
5082 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
5083 {
5084 /* This optimization is invalid for ordered comparisons
5085 if CONST+INCR overflows or if foo+incr might overflow.
5086 This optimization is invalid for floating point due to rounding.
5087 For pointer types we assume overflow doesn't happen. */
5088 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
5089 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5090 && (code == EQ_EXPR || code == NE_EXPR)))
5091 {
5092 tree newconst
5093 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
5094 constop, TREE_OPERAND (varop, 1)));
5095 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
5096
5097 /* If VAROP is a reference to a bitfield, we must mask the constant
5098 by the width of the field (assumed narrower than HOST_WIDE_INT). */
5099 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5100 && DECL_BIT_FIELD(TREE_OPERAND
5101 (TREE_OPERAND (varop, 0), 1)))
5102 {
5103 int size
5104 = TREE_INT_CST_LOW (DECL_SIZE
5105 (TREE_OPERAND
5106 (TREE_OPERAND (varop, 0), 1)));
5107
5108 newconst = fold (build (BIT_AND_EXPR,
5109 TREE_TYPE (varop), newconst,
5110 convert (TREE_TYPE (varop),
5111 build_int_2 (((HOST_WIDE_INT) 1 << size) - 1, 0))));
5112 }
5113
5114
5115 t = build (code, type, TREE_OPERAND (t, 0),
5116 TREE_OPERAND (t, 1));
5117 TREE_OPERAND (t, constopnum) = newconst;
5118 return t;
5119 }
5120 }
5121 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
5122 {
5123 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
5124 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5125 && (code == EQ_EXPR || code == NE_EXPR)))
5126 {
5127 tree newconst
5128 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
5129 constop, TREE_OPERAND (varop, 1)));
5130 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
5131
5132 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5133 && DECL_BIT_FIELD(TREE_OPERAND
5134 (TREE_OPERAND (varop, 0), 1)))
5135 {
5136 int size
5137 = TREE_INT_CST_LOW (DECL_SIZE
5138 (TREE_OPERAND
5139 (TREE_OPERAND (varop, 0), 1)));
5140
5141 newconst = fold (build (BIT_AND_EXPR,
5142 TREE_TYPE (varop), newconst,
5143 convert (TREE_TYPE (varop),
5144 build_int_2 (((HOST_WIDE_INT) 1 << size) - 1, 0))));
5145 }
5146
5147
5148 t = build (code, type, TREE_OPERAND (t, 0),
5149 TREE_OPERAND (t, 1));
5150 TREE_OPERAND (t, constopnum) = newconst;
5151 return t;
5152 }
5153 }
5154 }
5155
5156 /* If CST is positive, change X >= CST to X > (CST - 1) and X < CST to X <= (CST - 1). */
5157 if (TREE_CODE (arg1) == INTEGER_CST
5158 && TREE_CODE (arg0) != INTEGER_CST
5159 && tree_int_cst_sgn (arg1) > 0)
5160 {
5161 switch (TREE_CODE (t))
5162 {
5163 case GE_EXPR:
5164 code = GT_EXPR;
5165 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5166 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5167 break;
5168
5169 case LT_EXPR:
5170 code = LE_EXPR;
5171 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5172 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5173 break;
5174
5175 default:
5176 break;
5177 }
5178 }
5179
5180 /* If this is an EQ or NE comparison with zero and ARG0 is
5181 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
5182 two operations, but the latter can be done in one less insn
5183 on machines that have only two-operand insns or on which a
5184 constant cannot be the first operand. */
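/* For example, `((1 << b) & x) != 0' becomes `((x >> b) & 1) != 0'.  */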
5185 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
5186 && TREE_CODE (arg0) == BIT_AND_EXPR)
5187 {
5188 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
5189 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
5190 return
5191 fold (build (code, type,
5192 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5193 build (RSHIFT_EXPR,
5194 TREE_TYPE (TREE_OPERAND (arg0, 0)),
5195 TREE_OPERAND (arg0, 1),
5196 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
5197 convert (TREE_TYPE (arg0),
5198 integer_one_node)),
5199 arg1));
5200 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
5201 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
5202 return
5203 fold (build (code, type,
5204 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5205 build (RSHIFT_EXPR,
5206 TREE_TYPE (TREE_OPERAND (arg0, 1)),
5207 TREE_OPERAND (arg0, 0),
5208 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
5209 convert (TREE_TYPE (arg0),
5210 integer_one_node)),
5211 arg1));
5212 }
5213
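/* For instance, `((1 << foo) & bar) != 0' becomes
`((bar >> foo) & 1) != 0': still an AND and a shift, but now the
constant 1 need not be the first operand of the shift insn.  */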
5214 /* If this is an NE or EQ comparison of zero against the result of a
5215 signed MOD operation whose second operand is a power of 2, make
5216 the MOD operation unsigned since it is simpler and equivalent. */
5217 if ((code == NE_EXPR || code == EQ_EXPR)
5218 && integer_zerop (arg1)
5219 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
5220 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
5221 || TREE_CODE (arg0) == CEIL_MOD_EXPR
5222 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
5223 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
5224 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5225 {
5226 tree newtype = unsigned_type (TREE_TYPE (arg0));
5227 tree newmod = build (TREE_CODE (arg0), newtype,
5228 convert (newtype, TREE_OPERAND (arg0, 0)),
5229 convert (newtype, TREE_OPERAND (arg0, 1)));
5230
5231 return build (code, type, newmod, convert (newtype, arg1));
5232 }
5233
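/* E.g. `(i % 4) == 0' with signed I becomes
`((unsigned) i % 4U) == 0'; for equality with zero the two agree
even for negative I, since only the low-order bits matter.  */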
5234 /* If this is an NE comparison of zero with an AND of one, remove the
5235 comparison since the AND will give the correct value. */
5236 if (code == NE_EXPR && integer_zerop (arg1)
5237 && TREE_CODE (arg0) == BIT_AND_EXPR
5238 && integer_onep (TREE_OPERAND (arg0, 1)))
5239 return convert (type, arg0);
5240
5241 /* If we have (A & C) == C where C is a power of 2, convert this into
5242 (A & C) != 0. Similarly for NE_EXPR. */
5243 if ((code == EQ_EXPR || code == NE_EXPR)
5244 && TREE_CODE (arg0) == BIT_AND_EXPR
5245 && integer_pow2p (TREE_OPERAND (arg0, 1))
5246 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
5247 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
5248 arg0, integer_zero_node);
5249
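/* E.g. `(flags & 8) == 8' tests a single bit and is rewritten as
`(flags & 8) != 0'; likewise `(flags & 8) != 8' becomes
`(flags & 8) == 0'.  */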
5250 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
5251 and similarly for >= into !=. */
5252 if ((code == LT_EXPR || code == GE_EXPR)
5253 && TREE_UNSIGNED (TREE_TYPE (arg0))
5254 && TREE_CODE (arg1) == LSHIFT_EXPR
5255 && integer_onep (TREE_OPERAND (arg1, 0)))
5256 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
5257 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
5258 TREE_OPERAND (arg1, 1)),
5259 convert (TREE_TYPE (arg0), integer_zero_node));
5260
5261 else if ((code == LT_EXPR || code == GE_EXPR)
5262 && TREE_UNSIGNED (TREE_TYPE (arg0))
5263 && (TREE_CODE (arg1) == NOP_EXPR
5264 || TREE_CODE (arg1) == CONVERT_EXPR)
5265 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
5266 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
5267 return
5268 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
5269 convert (TREE_TYPE (arg0),
5270 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
5271 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
5272 convert (TREE_TYPE (arg0), integer_zero_node));
5273
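/* For unsigned X, `x < (1 << y)' holds exactly when X has no bits
set at position Y or above, so it is the same test as
`(x >> y) == 0'; `x >= (1 << y)' is likewise `(x >> y) != 0'.  */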
5274 /* Simplify comparison of something with itself. (For IEEE
5275 floating-point, we can only do some of these simplifications.) */
5276 if (operand_equal_p (arg0, arg1, 0))
5277 {
5278 switch (code)
5279 {
5280 case EQ_EXPR:
5281 case GE_EXPR:
5282 case LE_EXPR:
5283 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5284 {
5285 if (type == integer_type_node)
5286 return integer_one_node;
5287
5288 t = build_int_2 (1, 0);
5289 TREE_TYPE (t) = type;
5290 return t;
5291 }
5292 code = EQ_EXPR;
5293 TREE_SET_CODE (t, code);
5294 break;
5295
5296 case NE_EXPR:
5297 /* For NE, we can only do this simplification if integer. */
5298 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5299 break;
5300 /* ... fall through ... */
5301 case GT_EXPR:
5302 case LT_EXPR:
5303 if (type == integer_type_node)
5304 return integer_zero_node;
5305
5306 t = build_int_2 (0, 0);
5307 TREE_TYPE (t) = type;
5308 return t;
5309 default:
5310 abort ();
5311 }
5312 }
5313
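/* Thus for integral X, `x == x', `x >= x' and `x <= x' fold to 1
while `x != x', `x > x' and `x < x' fold to 0.  For IEEE floating
point, `x > x' and `x < x' are still always false, but `x >= x'
and `x <= x' only weaken to `x == x', and `x != x' is left alone,
since those are affected by NaNs.  */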
5314 /* An unsigned comparison against 0 can be simplified. */
5315 if (integer_zerop (arg1)
5316 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5317 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
5318 && TREE_UNSIGNED (TREE_TYPE (arg1)))
5319 {
5320 switch (TREE_CODE (t))
5321 {
5322 case GT_EXPR:
5323 code = NE_EXPR;
5324 TREE_SET_CODE (t, NE_EXPR);
5325 break;
5326 case LE_EXPR:
5327 code = EQ_EXPR;
5328 TREE_SET_CODE (t, EQ_EXPR);
5329 break;
5330 case GE_EXPR:
5331 return omit_one_operand (type,
5332 convert (type, integer_one_node),
5333 arg0);
5334 case LT_EXPR:
5335 return omit_one_operand (type,
5336 convert (type, integer_zero_node),
5337 arg0);
5338 default:
5339 break;
5340 }
5341 }
5342
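/* Since an unsigned X is never negative, `x > 0' is just `x != 0'
and `x <= 0' is `x == 0', while `x >= 0' folds to 1 and `x < 0'
to 0 (keeping X around only for its side effects).  */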
5343 /* An unsigned comparison against 0x7fffffff (the largest value of the corresponding signed type) is really a sign test and can be simplified.  */
5344 {
5345 int width = TYPE_PRECISION (TREE_TYPE (arg1));
5346 if (TREE_CODE (arg1) == INTEGER_CST
5347 && ! TREE_CONSTANT_OVERFLOW (arg1)
5348 && width <= HOST_BITS_PER_WIDE_INT
5349 && TREE_INT_CST_LOW (arg1) == ((HOST_WIDE_INT) 1 << (width - 1)) - 1
5350 && TREE_INT_CST_HIGH (arg1) == 0
5351 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5352 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
5353 && TREE_UNSIGNED (TREE_TYPE (arg1)))
5354 {
5355 switch (TREE_CODE (t))
5356 {
5357 case LE_EXPR:
5358 return fold (build (GE_EXPR, type,
5359 convert (signed_type (TREE_TYPE (arg0)),
5360 arg0),
5361 convert (signed_type (TREE_TYPE (arg1)),
5362 integer_zero_node)));
5363 case GT_EXPR:
5364 return fold (build (LT_EXPR, type,
5365 convert (signed_type (TREE_TYPE (arg0)),
5366 arg0),
5367 convert (signed_type (TREE_TYPE (arg1)),
5368 integer_zero_node)));
default:
break;
5369 }
5370 }
5371 }
5372
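/* E.g. for 32-bit int, `(unsigned) x <= 0x7fffffff' holds exactly
when the sign bit of X is clear, i.e. when `(int) x >= 0', and
`(unsigned) x > 0x7fffffff' is the complementary `(int) x < 0'.  */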
5373 /* If we are comparing an expression that just has comparisons
5374 of two integer values, arithmetic expressions of those comparisons,
5375 and constants, we can simplify it. There are only three cases
5376 to check: the two values can either be equal, the first can be
5377 greater, or the second can be greater. Fold the expression for
5378 those three values. Since each value must be 0 or 1, we have
5379 eight possibilities, each of which corresponds to the constant 0
5380 or 1 or one of the six possible comparisons.
5381
5382 This handles common cases like (a > b) == 0 but also handles
5383 expressions like ((x > y) - (y > x)) > 0, which supposedly
5384 occur in macroized code. */
5385
5386 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
5387 {
5388 tree cval1 = 0, cval2 = 0;
5389 int save_p = 0;
5390
5391 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
5392 /* Don't handle degenerate cases here; they should already
5393 have been handled anyway. */
5394 && cval1 != 0 && cval2 != 0
5395 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
5396 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
5397 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
5398 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
5399 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
5400 {
5401 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
5402 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
5403
5404 /* We can't just pass T to eval_subst in case cval1 or cval2
5405 was the same as ARG1. */
5406
5407 tree high_result
5408 = fold (build (code, type,
5409 eval_subst (arg0, cval1, maxval, cval2, minval),
5410 arg1));
5411 tree equal_result
5412 = fold (build (code, type,
5413 eval_subst (arg0, cval1, maxval, cval2, maxval),
5414 arg1));
5415 tree low_result
5416 = fold (build (code, type,
5417 eval_subst (arg0, cval1, minval, cval2, maxval),
5418 arg1));
5419
5420 /* All three of these results should be 0 or 1. Confirm they
5421 are. Then use those values to select the proper code
5422 to use. */
5423
5424 if ((integer_zerop (high_result)
5425 || integer_onep (high_result))
5426 && (integer_zerop (equal_result)
5427 || integer_onep (equal_result))
5428 && (integer_zerop (low_result)
5429 || integer_onep (low_result)))
5430 {
5431 /* Make a 3-bit mask with the high-order bit being the
5432 value for `>', the next for `=', and the low for `<'.  */
5433 switch ((integer_onep (high_result) * 4)
5434 + (integer_onep (equal_result) * 2)
5435 + integer_onep (low_result))
5436 {
5437 case 0:
5438 /* Always false. */
5439 return omit_one_operand (type, integer_zero_node, arg0);
5440 case 1:
5441 code = LT_EXPR;
5442 break;
5443 case 2:
5444 code = EQ_EXPR;
5445 break;
5446 case 3:
5447 code = LE_EXPR;
5448 break;
5449 case 4:
5450 code = GT_EXPR;
5451 break;
5452 case 5:
5453 code = NE_EXPR;
5454 break;
5455 case 6:
5456 code = GE_EXPR;
5457 break;
5458 case 7:
5459 /* Always true. */
5460 return omit_one_operand (type, integer_one_node, arg0);
5461 }
5462
5463 t = build (code, type, cval1, cval2);
5464 if (save_p)
5465 return save_expr (t);
5466 else
5467 return fold (t);
5468 }
5469 }
5470 }
5471
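/* As a worked example, `((x > y) - (y > x)) > 0' evaluated at the
three sample points gives 1 for x > y, 0 for x == y and 0 for
x < y; that is mask 4 (binary 100), so the whole expression
folds back to just `x > y'.  */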
5472 /* If this is a comparison of a field, we may be able to simplify it. */
5473 if ((TREE_CODE (arg0) == COMPONENT_REF
5474 || TREE_CODE (arg0) == BIT_FIELD_REF)
5475 && (code == EQ_EXPR || code == NE_EXPR)
5476 /* Handle the constant case even without -O
5477 to make sure the warnings are given. */
5478 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
5479 {
5480 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
5481 return t1 ? t1 : t;
5482 }
5483
5484 /* If this is a comparison of complex values and either or both
5485 sides are a COMPLEX_EXPR, it is best to split up the comparisons
5486 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
5487 may prevent needless evaluations. */
5488 if ((code == EQ_EXPR || code == NE_EXPR)
5489 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
5490 && (TREE_CODE (arg0) == COMPLEX_EXPR
5491 || TREE_CODE (arg1) == COMPLEX_EXPR))
5492 {
5493 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
5494 tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
5495 tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
5496 tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
5497 tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
5498
5499 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
5500 : TRUTH_ORIF_EXPR),
5501 type,
5502 fold (build (code, type, real0, real1)),
5503 fold (build (code, type, imag0, imag1))));
5504 }
5505
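/* That is, a complex `x == y' becomes
`realpart (x) == realpart (y) && imagpart (x) == imagpart (y)'
(with || for !=), so the imaginary halves need not be evaluated
once the real halves have decided the answer.  */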
5506 /* From here on, the only cases we handle are when the result is
5507 known to be a constant.
5508
5509 To compute GT, swap the arguments and do LT.
5510 To compute GE, do LT and invert the result.
5511 To compute LE, swap the arguments, do LT and invert the result.
5512 To compute NE, do EQ and invert the result.
5513
5514 Therefore, the code below must handle only EQ and LT. */
5515
5516 if (code == LE_EXPR || code == GT_EXPR)
5517 {
5518 tem = arg0, arg0 = arg1, arg1 = tem;
5519 code = swap_tree_comparison (code);
5520 }
5521
5522 /* Note that it is safe to invert for real values here because we
5523 will check below in the one case that it matters. */
5524
5525 invert = 0;
5526 if (code == NE_EXPR || code == GE_EXPR)
5527 {
5528 invert = 1;
5529 code = invert_tree_comparison (code);
5530 }
5531
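/* So `a > b' is actually computed as `b < a', `a >= b' as
`! (a < b)', `a <= b' as `! (b < a)', and NE as an inverted EQ.  */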
5532 /* Compute a result for LT or EQ if args permit;
5533 otherwise return T. */
5534 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
5535 {
5536 if (code == EQ_EXPR)
5537 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
5538 == TREE_INT_CST_LOW (arg1))
5539 && (TREE_INT_CST_HIGH (arg0)
5540 == TREE_INT_CST_HIGH (arg1)),
5541 0);
5542 else
5543 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
5544 ? INT_CST_LT_UNSIGNED (arg0, arg1)
5545 : INT_CST_LT (arg0, arg1)),
5546 0);
5547 }
5548
5549 #if 0 /* This is no longer useful, but breaks some real code. */
5550 /* Assume a nonexplicit constant cannot equal an explicit one,
5551 since such code would be undefined anyway.
5552 Exception: on sysvr4, using #pragma weak,
5553 a label can come out as 0. */
5554 else if (TREE_CODE (arg1) == INTEGER_CST
5555 && !integer_zerop (arg1)
5556 && TREE_CONSTANT (arg0)
5557 && TREE_CODE (arg0) == ADDR_EXPR
5558 && code == EQ_EXPR)
5559 t1 = build_int_2 (0, 0);
5560 #endif
5561 /* Two real constants can be compared explicitly. */
5562 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
5563 {
5564 /* If either operand is a NaN, the result is false with two
5565 exceptions: First, an NE_EXPR is true on NaNs, but that case
5566 is already handled correctly since we will be inverting the
5567 result for NE_EXPR. Second, if we had inverted a LE_EXPR
5568 or a GE_EXPR into a LT_EXPR, we must return true so that it
5569 will be inverted into false. */
5570
5571 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
5572 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
5573 t1 = build_int_2 (invert && code == LT_EXPR, 0);
5574
5575 else if (code == EQ_EXPR)
5576 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
5577 TREE_REAL_CST (arg1)),
5578 0);
5579 else
5580 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
5581 TREE_REAL_CST (arg1)),
5582 0);
5583 }
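/* E.g. `x >= NaN' was turned into `! (x < NaN)' above, so the LT
computed here must be 1 for the final inversion to yield the
required answer, false.  */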
5584
5585 if (t1 == NULL_TREE)
5586 return t;
5587
5588 if (invert)
5589 TREE_INT_CST_LOW (t1) ^= 1;
5590
5591 TREE_TYPE (t1) = type;
5592 return t1;
5593
5594 case COND_EXPR:
5595 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
5596 so all simple results must be passed through pedantic_non_lvalue. */
5597 if (TREE_CODE (arg0) == INTEGER_CST)
5598 return pedantic_non_lvalue
5599 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
5600 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
5601 return pedantic_omit_one_operand (type, arg1, arg0);
5602
5603 /* If the second operand is zero, invert the comparison and swap
5604 the second and third operands. Likewise if the second operand
5605 is constant and the third is not or if the third operand is
5606 equivalent to the first operand of the comparison. */
5607
5608 if (integer_zerop (arg1)
5609 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
5610 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5611 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
5612 TREE_OPERAND (t, 2),
5613 TREE_OPERAND (arg0, 1))))
5614 {
5615 /* See if this can be inverted. If it can't, possibly because
5616 it was a floating-point inequality comparison, don't do
5617 anything. */
5618 tem = invert_truthvalue (arg0);
5619
5620 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5621 {
5622 t = build (code, type, tem,
5623 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5624 arg0 = tem;
5625 arg1 = TREE_OPERAND (t, 2);
5626 STRIP_NOPS (arg1);
5627 }
5628 }
5629
5630 /* If we have A op B ? A : C, we may be able to convert this to a
5631 simpler expression, depending on the operation and the values
5632 of B and C. IEEE floating point prevents this though,
5633 because A or B might be -0.0 or a NaN. */
5634
5635 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5636 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5637 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
5638 || flag_fast_math)
5639 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
5640 arg1, TREE_OPERAND (arg0, 1)))
5641 {
5642 tree arg2 = TREE_OPERAND (t, 2);
5643 enum tree_code comp_code = TREE_CODE (arg0);
5644
5645 STRIP_NOPS (arg2);
5646
5647 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
5648 depending on the comparison operation. */
5649 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
5650 ? real_zerop (TREE_OPERAND (arg0, 1))
5651 : integer_zerop (TREE_OPERAND (arg0, 1)))
5652 && TREE_CODE (arg2) == NEGATE_EXPR
5653 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5654 switch (comp_code)
5655 {
5656 case EQ_EXPR:
5657 return pedantic_non_lvalue
5658 (fold (build1 (NEGATE_EXPR, type, arg1)));
5659 case NE_EXPR:
5660 return pedantic_non_lvalue (convert (type, arg1));
5661 case GE_EXPR:
5662 case GT_EXPR:
5663 return pedantic_non_lvalue
5664 (convert (type, fold (build1 (ABS_EXPR,
5665 TREE_TYPE (arg1), arg1))));
5666 case LE_EXPR:
5667 case LT_EXPR:
5668 return pedantic_non_lvalue
5669 (fold (build1 (NEGATE_EXPR, type,
5670 convert (type,
5671 fold (build1 (ABS_EXPR,
5672 TREE_TYPE (arg1),
5673 arg1))))));
5674 default:
5675 abort ();
5676 }
5677
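/* For example, `x >= 0 ? x : -x' folds to `abs (x)',
`x <= 0 ? x : -x' to `-abs (x)', and `x == 0 ? x : -x' to plain
`-x', since both arms agree when X is zero.  */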
5678 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
5679 always zero. */
5680
5681 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
5682 {
5683 if (comp_code == NE_EXPR)
5684 return pedantic_non_lvalue (convert (type, arg1));
5685 else if (comp_code == EQ_EXPR)
5686 return pedantic_non_lvalue (convert (type, integer_zero_node));
5687 }
5688
5689 /* If this is A op B ? A : B, this is either A, B, min (A, B),
5690 or max (A, B), depending on the operation. */
5691
5692 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
5693 arg2, TREE_OPERAND (arg0, 0)))
5694 {
5695 tree comp_op0 = TREE_OPERAND (arg0, 0);
5696 tree comp_op1 = TREE_OPERAND (arg0, 1);
5697 tree comp_type = TREE_TYPE (comp_op0);
5698
5699 switch (comp_code)
5700 {
5701 case EQ_EXPR:
5702 return pedantic_non_lvalue (convert (type, arg2));
5703 case NE_EXPR:
5704 return pedantic_non_lvalue (convert (type, arg1));
5705 case LE_EXPR:
5706 case LT_EXPR:
5707 /* In C++ a ?: expression can be an lvalue, so put the
5708 operand which will be used if they are equal first
5709 so that we can convert this back to the
5710 corresponding COND_EXPR. */
5711 return pedantic_non_lvalue
5712 (convert (type, (fold (build (MIN_EXPR, comp_type,
5713 (comp_code == LE_EXPR
5714 ? comp_op0 : comp_op1),
5715 (comp_code == LE_EXPR
5716 ? comp_op1 : comp_op0))))));
5717 break;
5718 case GE_EXPR:
5719 case GT_EXPR:
5720 return pedantic_non_lvalue
5721 (convert (type, fold (build (MAX_EXPR, comp_type,
5722 (comp_code == GE_EXPR
5723 ? comp_op0 : comp_op1),
5724 (comp_code == GE_EXPR
5725 ? comp_op1 : comp_op0)))));
5726 break;
5727 default:
5728 abort ();
5729 }
5730 }
5731
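/* E.g. `a < b ? a : b' becomes `min (a, b)' and `a >= b ? a : b'
becomes `max (a, b)', with the operand order chosen above so the
MIN/MAX can be turned back into an equivalent COND_EXPR.  */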
5732 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5733 we might still be able to simplify this. For example,
5734 if C1 is one less or one more than C2, this might have started
5735 out as a MIN or MAX and been transformed by this function.
5736 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5737
5738 if (INTEGRAL_TYPE_P (type)
5739 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5740 && TREE_CODE (arg2) == INTEGER_CST)
5741 switch (comp_code)
5742 {
5743 case EQ_EXPR:
5744 /* We can replace A with C1 in this case. */
5745 arg1 = convert (type, TREE_OPERAND (arg0, 1));
5746 t = build (code, type, TREE_OPERAND (t, 0), arg1,
5747 TREE_OPERAND (t, 2));
5748 break;
5749
5750 case LT_EXPR:
5751 /* If C1 is C2 + 1, this is min(A, C2). */
5752 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5753 && operand_equal_p (TREE_OPERAND (arg0, 1),
5754 const_binop (PLUS_EXPR, arg2,
5755 integer_one_node, 0), 1))
5756 return pedantic_non_lvalue
5757 (fold (build (MIN_EXPR, type, arg1, arg2)));
5758 break;
5759
5760 case LE_EXPR:
5761 /* If C1 is C2 - 1, this is min(A, C2). */
5762 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5763 && operand_equal_p (TREE_OPERAND (arg0, 1),
5764 const_binop (MINUS_EXPR, arg2,
5765 integer_one_node, 0), 1))
5766 return pedantic_non_lvalue
5767 (fold (build (MIN_EXPR, type, arg1, arg2)));
5768 break;
5769
5770 case GT_EXPR:
5771 /* If C1 is C2 - 1, this is max(A, C2). */
5772 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5773 && operand_equal_p (TREE_OPERAND (arg0, 1),
5774 const_binop (MINUS_EXPR, arg2,
5775 integer_one_node, 0), 1))
5776 return pedantic_non_lvalue
5777 (fold (build (MAX_EXPR, type, arg1, arg2)));
5778 break;
5779
5780 case GE_EXPR:
5781 /* If C1 is C2 + 1, this is max(A, C2). */
5782 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5783 && operand_equal_p (TREE_OPERAND (arg0, 1),
5784 const_binop (PLUS_EXPR, arg2,
5785 integer_one_node, 0), 1))
5786 return pedantic_non_lvalue
5787 (fold (build (MAX_EXPR, type, arg1, arg2)));
5788 break;
5789 case NE_EXPR:
5790 break;
5791 default:
5792 abort ();
5793 }
5794 }
5795
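/* E.g. `a < 4 ? a : 3' has C1 == C2 + 1 and so is recognized above
as `min (a, 3)', even though it no longer looks like a MIN_EXPR;
the TYPE_MIN_VALUE/TYPE_MAX_VALUE checks keep the C2 plus or
minus 1 computation from wrapping around.  */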
5796 /* If the second operand is simpler than the third, swap them
5797 since that produces better jump optimization results. */
5798 if ((TREE_CONSTANT (arg1) || TREE_CODE_CLASS (TREE_CODE (arg1)) == 'd'
5799 || TREE_CODE (arg1) == SAVE_EXPR)
5800 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
5801 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 2))) == 'd'
5802 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
5803 {
5804 /* See if this can be inverted. If it can't, possibly because
5805 it was a floating-point inequality comparison, don't do
5806 anything. */
5807 tem = invert_truthvalue (arg0);
5808
5809 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5810 {
5811 t = build (code, type, tem,
5812 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5813 arg0 = tem;
5814 arg1 = TREE_OPERAND (t, 2);
5815 STRIP_NOPS (arg1);
5816 }
5817 }
5818
5819 /* Convert A ? 1 : 0 to simply A. */
5820 if (integer_onep (TREE_OPERAND (t, 1))
5821 && integer_zerop (TREE_OPERAND (t, 2))
5822 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
5823 call to fold will try to move the conversion inside
5824 a COND, which will recurse. In that case, the COND_EXPR
5825 is probably the best choice, so leave it alone. */
5826 && type == TREE_TYPE (arg0))
5827 return pedantic_non_lvalue (arg0);
5828
5829 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
5830 operation is simply A & 2. */
5831
5832 if (integer_zerop (TREE_OPERAND (t, 2))
5833 && TREE_CODE (arg0) == NE_EXPR
5834 && integer_zerop (TREE_OPERAND (arg0, 1))
5835 && integer_pow2p (arg1)
5836 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
5837 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
5838 arg1, 1))
5839 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
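/* E.g. `(flags & 2) != 0 ? 2 : 0' is exactly `flags & 2', whose
value is already either 2 or 0.  */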
5840
5841 return t;
5842
5843 case COMPOUND_EXPR:
5844 /* When pedantic, a compound expression can be neither an lvalue
5845 nor an integer constant expression. */
5846 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
5847 return t;
5848 /* Don't let (0, 0) be null pointer constant. */
5849 if (integer_zerop (arg1))
5850 return non_lvalue (arg1);
5851 return arg1;
5852
5853 case COMPLEX_EXPR:
5854 if (wins)
5855 return build_complex (type, arg0, arg1);
5856 return t;
5857
5858 case REALPART_EXPR:
5859 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5860 return t;
5861 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5862 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
5863 TREE_OPERAND (arg0, 1));
5864 else if (TREE_CODE (arg0) == COMPLEX_CST)
5865 return TREE_REALPART (arg0);
5866 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5867 return fold (build (TREE_CODE (arg0), type,
5868 fold (build1 (REALPART_EXPR, type,
5869 TREE_OPERAND (arg0, 0))),
5870 fold (build1 (REALPART_EXPR,
5871 type, TREE_OPERAND (arg0, 1)))));
5872 return t;
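/* So `realpart (x + y)' is rewritten as
`realpart (x) + realpart (y)', letting the two halves of a
complex sum fold independently.  */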
5873
5874 case IMAGPART_EXPR:
5875 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5876 return convert (type, integer_zero_node);
5877 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5878 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
5879 TREE_OPERAND (arg0, 0));
5880 else if (TREE_CODE (arg0) == COMPLEX_CST)
5881 return TREE_IMAGPART (arg0);
5882 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5883 return fold (build (TREE_CODE (arg0), type,
5884 fold (build1 (IMAGPART_EXPR, type,
5885 TREE_OPERAND (arg0, 0))),
5886 fold (build1 (IMAGPART_EXPR, type,
5887 TREE_OPERAND (arg0, 1)))));
5888 return t;
5889
5890 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
5891 appropriate. */
5892 case CLEANUP_POINT_EXPR:
5893 if (! TREE_SIDE_EFFECTS (arg0))
5894 return TREE_OPERAND (t, 0);
5895
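/* Otherwise push the cleanup point inward where one operand is
side-effect free: e.g. `CLEANUP_POINT (a + b)' where only B has
side effects becomes `a + CLEANUP_POINT (b)'.  */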
5896 {
5897 enum tree_code code0 = TREE_CODE (arg0);
5898 int kind0 = TREE_CODE_CLASS (code0);
5899 tree arg00 = TREE_OPERAND (arg0, 0);
5900 tree arg01;
5901
5902 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
5903 return fold (build1 (code0, type,
5904 fold (build1 (CLEANUP_POINT_EXPR,
5905 TREE_TYPE (arg00), arg00))));
5906
5907 if (kind0 == '<' || kind0 == '2'
5908 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
5909 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
5910 || code0 == TRUTH_XOR_EXPR)
5911 {
5912 arg01 = TREE_OPERAND (arg0, 1);
5913
5914 if (! TREE_SIDE_EFFECTS (arg00))
5915 return fold (build (code0, type, arg00,
5916 fold (build1 (CLEANUP_POINT_EXPR,
5917 TREE_TYPE (arg01), arg01))));
5918
5919 if (! TREE_SIDE_EFFECTS (arg01))
5920 return fold (build (code0, type,
5921 fold (build1 (CLEANUP_POINT_EXPR,
5922 TREE_TYPE (arg00), arg00)),
5923 arg01));
5924 }
5925
5926 return t;
5927 }
5928
5929 default:
5930 return t;
5931 } /* switch (code) */
5932 }
5933
5934 /* Determine if first argument is a multiple of second argument.
5935 Return 0 if it is not, or if that cannot easily be determined.
5936
5937 An example of the sort of thing we care about (at this point --
5938 this routine could surely be made more general, and expanded
5939 to do what the *_DIV_EXPR's fold() cases do now) is discovering
5940 that
5941
5942 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
5943
5944 is a multiple of
5945
5946 SAVE_EXPR (J * 8)
5947
5948 when we know that the two `SAVE_EXPR (J * 8)' nodes are the
5949 same node (which means they will have the same value at run
5950 time, even though we don't know when they'll be assigned).
5951
5952 This code also handles discovering that
5953
5954 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
5955
5956 is a multiple of
5957
5958 8
5959
5960 (of course) so we don't have to worry about dealing with a
5961 possible remainder.
5962
5963 Note that we _look_ inside a SAVE_EXPR only to determine
5964 how it was calculated; it is not safe for fold() to do much
5965 of anything else with the internals of a SAVE_EXPR, since
5966 fold() cannot know when it will be evaluated at run time.
5967 For example, the latter example above _cannot_ be implemented
5968 as
5969
5970 SAVE_EXPR (I) * J
5971
5972 or any variant thereof, since the value of J at evaluation time
5973 of the original SAVE_EXPR is not necessarily the same at the time
5974 the new expression is evaluated. The only optimization of this
5975 sort that would be valid is changing
5976
5977 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
5978 divided by
5979 8
5980
5981 to
5982
5983 SAVE_EXPR (I) * SAVE_EXPR (J)
5984
5985 (where the same SAVE_EXPR (J) is used in the original and the
5986 transformed version). */
5987
5988 static int
5989 multiple_of_p (type, top, bottom)
5990 tree type;
5991 tree top;
5992 tree bottom;
5993 {
5994 if (operand_equal_p (top, bottom, 0))
5995 return 1;
5996
5997 if (TREE_CODE (type) != INTEGER_TYPE)
5998 return 0;
5999
6000 switch (TREE_CODE (top))
6001 {
6002 case MULT_EXPR:
6003 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
6004 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
6005
6006 case PLUS_EXPR:
6007 case MINUS_EXPR:
6008 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
6009 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
6010
6011 case NOP_EXPR:
6012 /* Punt if conversion from non-integral or wider integral type. */
6013 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
6014 || (TYPE_PRECISION (type)
6015 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
6016 return 0;
6017 /* Fall through. */
6018 case SAVE_EXPR:
6019 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
6020
6021 case INTEGER_CST:
6022 if ((TREE_CODE (bottom) != INTEGER_CST)
6023 || (tree_int_cst_sgn (top) < 0)
6024 || (tree_int_cst_sgn (bottom) < 0))
6025 return 0;
6026 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
6027 top, bottom, 0));
6028
6029 default:
6030 return 0;
6031 }
6032 }
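
#if 0 /* Illustrative sketch only, not part of the compiler: one way a
caller (such as the *_DIV_EXPR fold cases mentioned above) might
use multiple_of_p.  EXACT_DIV_EXPR is a real tree code; the
helper itself is hypothetical.  */
static tree
fold_exact_div_sketch (type, top, bottom)
     tree type, top, bottom;
{
  /* When TOP is known to be a multiple of BOTTOM, any of the
     rounding divisions can be replaced by the cheaper exact form.  */
  if (multiple_of_p (type, top, bottom))
    return fold (build (EXACT_DIV_EXPR, type, top, bottom));
  return NULL_TREE;
}
#endif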
6033 \f