]>
Commit | Line | Data |
---|---|---|
2bc77e10 | 1 | /* Fold a constant sub-tree into a single node for C-compiler |
22331643 | 2 | Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, |
63bf54cf | 3 | 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc. |
2bc77e10 | 4 | |
f12b58b3 | 5 | This file is part of GCC. |
2bc77e10 | 6 | |
f12b58b3 | 7 | GCC is free software; you can redistribute it and/or modify it under |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 2, or (at your option) any later | |
10 | version. | |
2bc77e10 | 11 | |
f12b58b3 | 12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
2bc77e10 | 16 | |
17 | You should have received a copy of the GNU General Public License | |
f12b58b3 | 18 | along with GCC; see the file COPYING. If not, write to the Free |
67ce556b | 19 | Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA |
20 | 02110-1301, USA. */ | |
2bc77e10 | 21 | |
4bbea254 | 22 | /*@@ This file should be rewritten to use an arbitrary precision |
2bc77e10 | 23 | @@ representation for "struct tree_int_cst" and "struct tree_real_cst". |
24 | @@ Perhaps the routines could also be used for bc/dc, and made a lib. | |
25 | @@ The routines that translate from the ap rep should | |
26 | @@ warn if precision et. al. is lost. | |
27 | @@ This would also make life easier when this technology is used | |
28 | @@ for cross-compilers. */ | |
29 | ||
30384dcf | 30 | /* The entry points in this file are fold, size_int_wide, size_binop |
6e44befc | 31 | and force_fit_type. |
2bc77e10 | 32 | |
33 | fold takes a tree as argument and returns a simplified tree. | |
34 | ||
35 | size_binop takes a tree code for an arithmetic operation | |
36 | and two operands that are trees, and produces a tree for the | |
37 | result, assuming the type comes from `sizetype'. | |
38 | ||
39 | size_int takes an integer value, and creates a tree constant | |
6e44befc | 40 | with type from `sizetype'. |
41 | ||
4d28c5d1 | 42 | force_fit_type takes a constant, an overflowable flag and prior |
43 | overflow indicators. It forces the value to fit the type and sets | |
44 | TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */ | |
6e44befc | 45 | |
0dbd1c74 | 46 | #include "config.h" |
5ee8fe30 | 47 | #include "system.h" |
805e22b2 | 48 | #include "coretypes.h" |
49 | #include "tm.h" | |
2bc77e10 | 50 | #include "flags.h" |
51 | #include "tree.h" | |
ef258422 | 52 | #include "real.h" |
0f9685e4 | 53 | #include "rtl.h" |
aed0bd19 | 54 | #include "expr.h" |
7953c610 | 55 | #include "tm_p.h" |
12874aaf | 56 | #include "toplev.h" |
1bfd55c5 | 57 | #include "ggc.h" |
15d769aa | 58 | #include "hashtab.h" |
20325f61 | 59 | #include "langhooks.h" |
fc3df357 | 60 | #include "md5.h" |
2bc77e10 | 61 | |
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
65 | ||
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   The four bits mean (as the enumerator values below show):
     bit 0 (1) = "less than" is possible,
     bit 1 (2) = "equal" is possible,
     bit 2 (4) = "greater than" is possible,
     bit 3 (8) = "unordered" is possible.
   A compound code is the OR of the outcomes it allows, e.g.
   COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
87 | ||
/* Forward declarations of the static helpers defined later in this
   file.  */
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
143 | ||
9d77437d | 144 | |
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.

   Note: argument A is expanded twice, so it must be side-effect free.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
2bc77e10 | 154 | \f |
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

/* Low half-word of X.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
/* High half-word of X, shifted down to the low position.  */
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* The radix of the 4-word representation: 2^(HOST_BITS_PER_WIDE_INT/2).  */
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
2bc77e10 | 165 | |
bd5b3bce | 166 | /* Unpack a two-word integer into 4 words. |
b572011e | 167 | LOW and HI are the integer, as two `HOST_WIDE_INT' pieces. |
bd5b3bce | 168 | WORDS points to the array of HOST_WIDE_INTs. */ |
2bc77e10 | 169 | |
170 | static void | |
de1b648b | 171 | encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi) |
2bc77e10 | 172 | { |
bd5b3bce | 173 | words[0] = LOWPART (low); |
174 | words[1] = HIGHPART (low); | |
175 | words[2] = LOWPART (hi); | |
176 | words[3] = HIGHPART (hi); | |
2bc77e10 | 177 | } |
178 | ||
bd5b3bce | 179 | /* Pack an array of 4 words into a two-word integer. |
180 | WORDS points to the array of words. | |
b572011e | 181 | The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */ |
2bc77e10 | 182 | |
183 | static void | |
dc81944a | 184 | decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low, |
185 | HOST_WIDE_INT *hi) | |
2bc77e10 | 186 | { |
083a2b5e | 187 | *low = words[0] + words[1] * BASE; |
188 | *hi = words[2] + words[3] * BASE; | |
2bc77e10 | 189 | } |
190 | \f | |
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOWED if,
	CONST_OVERFLOWED is nonzero
	or we set TREE_OVERFLOWED.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  /* Pointers and offsets are as wide as a target pointer; every other
     type supplies its own precision.  */
  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;  /* Value already fits in the double-word; nothing to clear.  */
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then, for signed (and sizetype) types, replicate the sign bit of
     the PREC-bit value into all higher bits.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  /* Copy before setting flags: build_int_cst_wide may return a
	     shared node.  */
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
292 | \f | |
b572011e | 293 | /* Add two doubleword integers with doubleword result. |
81035ec1 | 294 | Return nonzero if the operation overflows according to UNSIGNED_P. |
b572011e | 295 | Each argument is given as two `HOST_WIDE_INT' pieces. |
2bc77e10 | 296 | One argument is L1 and H1; the other, L2 and H2. |
bd5b3bce | 297 | The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */ |
2bc77e10 | 298 | |
b9e999f0 | 299 | int |
81035ec1 | 300 | add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, |
301 | unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2, | |
302 | unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, | |
303 | bool unsigned_p) | |
2bc77e10 | 304 | { |
a0c2c45b | 305 | unsigned HOST_WIDE_INT l; |
306 | HOST_WIDE_INT h; | |
2bc77e10 | 307 | |
bd5b3bce | 308 | l = l1 + l2; |
a0c2c45b | 309 | h = h1 + h2 + (l < l1); |
2bc77e10 | 310 | |
bd5b3bce | 311 | *lv = l; |
312 | *hv = h; | |
81035ec1 | 313 | |
314 | if (unsigned_p) | |
315 | return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1; | |
316 | else | |
317 | return OVERFLOW_SUM_SIGN (h1, h2, h); | |
2bc77e10 | 318 | } |
319 | ||
b572011e | 320 | /* Negate a doubleword integer with doubleword result. |
b9e999f0 | 321 | Return nonzero if the operation overflows, assuming it's signed. |
b572011e | 322 | The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1. |
bd5b3bce | 323 | The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */ |
2bc77e10 | 324 | |
b9e999f0 | 325 | int |
dc81944a | 326 | neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, |
327 | unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv) | |
2bc77e10 | 328 | { |
329 | if (l1 == 0) | |
330 | { | |
331 | *lv = 0; | |
332 | *hv = - h1; | |
f55401f0 | 333 | return (*hv & h1) < 0; |
2bc77e10 | 334 | } |
335 | else | |
336 | { | |
cc049fa3 | 337 | *lv = -l1; |
338 | *hv = ~h1; | |
b9e999f0 | 339 | return 0; |
2bc77e10 | 340 | } |
341 | } | |
342 | \f | |
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];	/* 4-digit * 4-digit gives 8 digits.  */
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  /* Split each operand into 4 half-word digits.  */
  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  /* Schoolbook multiplication, digit by digit in base BASE.  */
  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  /* Low half of the 8-digit product is the result; the top half is
     only needed for overflow detection.  */
  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  /* No overflow iff the top half is the sign extension of the low half:
     all zeros with *hv >= 0, or all ones with *hv < 0.  */
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
405 | \f | |
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  /* A negative left shift is a right shift.  */
  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      /* Entire low word shifts into the high word.  */
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      /* The two-step right shift by (width - count - 1) then by 1
	 avoids the undefined shift by full width when COUNT == 0.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  /* SIGNMASK is all ones if the PREC-th result bit is set, else zero.  */
  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
468 | ||
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  /* All ones when shifting a negative value arithmetically, else zero.  */
  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      /* Entire high word shifts into the low word.  */
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      /* The two-step left shift by (width - count - 1) then by 1 avoids
	 the undefined shift by full width when COUNT == 0.  */
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
529 | \f | |
bd5b3bce | 530 | /* Rotate the doubleword integer in L1, H1 left by COUNT places |
2bc77e10 | 531 | keeping only PREC bits of result. |
532 | Rotate right if COUNT is negative. | |
b572011e | 533 | Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */ |
2bc77e10 | 534 | |
535 | void | |
dc81944a | 536 | lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, |
537 | HOST_WIDE_INT count, unsigned int prec, | |
538 | unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv) | |
2bc77e10 | 539 | { |
a0c2c45b | 540 | unsigned HOST_WIDE_INT s1l, s2l; |
541 | HOST_WIDE_INT s1h, s2h; | |
2bc77e10 | 542 | |
7a1b56a9 | 543 | count %= prec; |
2bc77e10 | 544 | if (count < 0) |
7a1b56a9 | 545 | count += prec; |
2bc77e10 | 546 | |
7a1b56a9 | 547 | lshift_double (l1, h1, count, prec, &s1l, &s1h, 0); |
548 | rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0); | |
549 | *lv = s1l | s2l; | |
550 | *hv = s1h | s2h; | |
2bc77e10 | 551 | } |
552 | ||
b572011e | 553 | /* Rotate the doubleword integer in L1, H1 left by COUNT places |
2bc77e10 | 554 | keeping only PREC bits of result. COUNT must be positive. |
b572011e | 555 | Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */ |
2bc77e10 | 556 | |
557 | void | |
dc81944a | 558 | rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, |
559 | HOST_WIDE_INT count, unsigned int prec, | |
560 | unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv) | |
2bc77e10 | 561 | { |
a0c2c45b | 562 | unsigned HOST_WIDE_INT s1l, s2l; |
563 | HOST_WIDE_INT s1h, s2h; | |
2bc77e10 | 564 | |
7a1b56a9 | 565 | count %= prec; |
566 | if (count < 0) | |
567 | count += prec; | |
2bc77e10 | 568 | |
7a1b56a9 | 569 | rshift_double (l1, h1, count, prec, &s1l, &s1h, 0); |
570 | lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0); | |
571 | *lv = s1l | s2l; | |
572 | *hv = s1h | s2h; | |
2bc77e10 | 573 | } |
574 | \f | |
b572011e | 575 | /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN |
2bc77e10 | 576 | for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM). |
577 | CODE is a tree code for a kind of division, one of | |
578 | TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR | |
579 | or EXACT_DIV_EXPR | |
20dd417a | 580 | It controls how the quotient is rounded to an integer. |
b9e999f0 | 581 | Return nonzero if the operation overflows. |
2bc77e10 | 582 | UNS nonzero says do unsigned division. */ |
583 | ||
15ca565e | 584 | int |
de1b648b | 585 | div_and_round_double (enum tree_code code, int uns, |
586 | unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */ | |
587 | HOST_WIDE_INT hnum_orig, | |
588 | unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */ | |
dc81944a | 589 | HOST_WIDE_INT hden_orig, |
590 | unsigned HOST_WIDE_INT *lquo, | |
de1b648b | 591 | HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem, |
592 | HOST_WIDE_INT *hrem) | |
2bc77e10 | 593 | { |
594 | int quo_neg = 0; | |
bd5b3bce | 595 | HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */ |
596 | HOST_WIDE_INT den[4], quo[4]; | |
19cb6b50 | 597 | int i, j; |
bd5b3bce | 598 | unsigned HOST_WIDE_INT work; |
a0c2c45b | 599 | unsigned HOST_WIDE_INT carry = 0; |
600 | unsigned HOST_WIDE_INT lnum = lnum_orig; | |
abd9ac9c | 601 | HOST_WIDE_INT hnum = hnum_orig; |
a0c2c45b | 602 | unsigned HOST_WIDE_INT lden = lden_orig; |
abd9ac9c | 603 | HOST_WIDE_INT hden = hden_orig; |
b9e999f0 | 604 | int overflow = 0; |
2bc77e10 | 605 | |
a0c2c45b | 606 | if (hden == 0 && lden == 0) |
ad87de1e | 607 | overflow = 1, lden = 1; |
2bc77e10 | 608 | |
139c3f48 | 609 | /* Calculate quotient sign and convert operands to unsigned. */ |
cc049fa3 | 610 | if (!uns) |
2bc77e10 | 611 | { |
b9e999f0 | 612 | if (hnum < 0) |
2bc77e10 | 613 | { |
614 | quo_neg = ~ quo_neg; | |
b9e999f0 | 615 | /* (minimum integer) / (-1) is the only overflow case. */ |
a0c2c45b | 616 | if (neg_double (lnum, hnum, &lnum, &hnum) |
617 | && ((HOST_WIDE_INT) lden & hden) == -1) | |
b9e999f0 | 618 | overflow = 1; |
2bc77e10 | 619 | } |
cc049fa3 | 620 | if (hden < 0) |
2bc77e10 | 621 | { |
622 | quo_neg = ~ quo_neg; | |
b9e999f0 | 623 | neg_double (lden, hden, &lden, &hden); |
2bc77e10 | 624 | } |
625 | } | |
626 | ||
627 | if (hnum == 0 && hden == 0) | |
628 | { /* single precision */ | |
629 | *hquo = *hrem = 0; | |
802ddb63 | 630 | /* This unsigned division rounds toward zero. */ |
a0c2c45b | 631 | *lquo = lnum / lden; |
2bc77e10 | 632 | goto finish_up; |
633 | } | |
634 | ||
635 | if (hnum == 0) | |
636 | { /* trivial case: dividend < divisor */ | |
637 | /* hden != 0 already checked. */ | |
638 | *hquo = *lquo = 0; | |
639 | *hrem = hnum; | |
640 | *lrem = lnum; | |
641 | goto finish_up; | |
642 | } | |
643 | ||
f0af5a88 | 644 | memset (quo, 0, sizeof quo); |
2bc77e10 | 645 | |
f0af5a88 | 646 | memset (num, 0, sizeof num); /* to zero 9th element */ |
647 | memset (den, 0, sizeof den); | |
2bc77e10 | 648 | |
cc049fa3 | 649 | encode (num, lnum, hnum); |
2bc77e10 | 650 | encode (den, lden, hden); |
651 | ||
bd5b3bce | 652 | /* Special code for when the divisor < BASE. */ |
a0c2c45b | 653 | if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE) |
bd5b3bce | 654 | { |
2bc77e10 | 655 | /* hnum != 0 already checked. */ |
bd5b3bce | 656 | for (i = 4 - 1; i >= 0; i--) |
2bc77e10 | 657 | { |
bd5b3bce | 658 | work = num[i] + carry * BASE; |
a0c2c45b | 659 | quo[i] = work / lden; |
660 | carry = work % lden; | |
2bc77e10 | 661 | } |
662 | } | |
bd5b3bce | 663 | else |
664 | { | |
665 | /* Full double precision division, | |
666 | with thanks to Don Knuth's "Seminumerical Algorithms". */ | |
a0c2c45b | 667 | int num_hi_sig, den_hi_sig; |
668 | unsigned HOST_WIDE_INT quo_est, scale; | |
2bc77e10 | 669 | |
6ef828f9 | 670 | /* Find the highest nonzero divisor digit. */ |
cc049fa3 | 671 | for (i = 4 - 1;; i--) |
672 | if (den[i] != 0) | |
673 | { | |
674 | den_hi_sig = i; | |
675 | break; | |
676 | } | |
bd5b3bce | 677 | |
a0c2c45b | 678 | /* Insure that the first digit of the divisor is at least BASE/2. |
679 | This is required by the quotient digit estimation algorithm. */ | |
2bc77e10 | 680 | |
a0c2c45b | 681 | scale = BASE / (den[den_hi_sig] + 1); |
682 | if (scale > 1) | |
683 | { /* scale divisor and dividend */ | |
684 | carry = 0; | |
685 | for (i = 0; i <= 4 - 1; i++) | |
686 | { | |
687 | work = (num[i] * scale) + carry; | |
688 | num[i] = LOWPART (work); | |
689 | carry = HIGHPART (work); | |
690 | } | |
2bc77e10 | 691 | |
a0c2c45b | 692 | num[4] = carry; |
693 | carry = 0; | |
694 | for (i = 0; i <= 4 - 1; i++) | |
695 | { | |
696 | work = (den[i] * scale) + carry; | |
697 | den[i] = LOWPART (work); | |
698 | carry = HIGHPART (work); | |
699 | if (den[i] != 0) den_hi_sig = i; | |
700 | } | |
701 | } | |
2bc77e10 | 702 | |
a0c2c45b | 703 | num_hi_sig = 4; |
2bc77e10 | 704 | |
a0c2c45b | 705 | /* Main loop */ |
706 | for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) | |
2bc77e10 | 707 | { |
a0c2c45b | 708 | /* Guess the next quotient digit, quo_est, by dividing the first |
709 | two remaining dividend digits by the high order quotient digit. | |
710 | quo_est is never low and is at most 2 high. */ | |
711 | unsigned HOST_WIDE_INT tmp; | |
712 | ||
713 | num_hi_sig = i + den_hi_sig + 1; | |
714 | work = num[num_hi_sig] * BASE + num[num_hi_sig - 1]; | |
715 | if (num[num_hi_sig] != den[den_hi_sig]) | |
716 | quo_est = work / den[den_hi_sig]; | |
717 | else | |
718 | quo_est = BASE - 1; | |
2bc77e10 | 719 | |
1e625a2e | 720 | /* Refine quo_est so it's usually correct, and at most one high. */ |
a0c2c45b | 721 | tmp = work - quo_est * den[den_hi_sig]; |
722 | if (tmp < BASE | |
723 | && (den[den_hi_sig - 1] * quo_est | |
724 | > (tmp * BASE + num[num_hi_sig - 2]))) | |
725 | quo_est--; | |
2bc77e10 | 726 | |
a0c2c45b | 727 | /* Try QUO_EST as the quotient digit, by multiplying the |
728 | divisor by QUO_EST and subtracting from the remaining dividend. | |
729 | Keep in mind that QUO_EST is the I - 1st digit. */ | |
730 | ||
731 | carry = 0; | |
2bc77e10 | 732 | for (j = 0; j <= den_hi_sig; j++) |
733 | { | |
a0c2c45b | 734 | work = quo_est * den[j] + carry; |
bd5b3bce | 735 | carry = HIGHPART (work); |
a0c2c45b | 736 | work = num[i + j] - LOWPART (work); |
bd5b3bce | 737 | num[i + j] = LOWPART (work); |
a0c2c45b | 738 | carry += HIGHPART (work) != 0; |
2bc77e10 | 739 | } |
2bc77e10 | 740 | |
a0c2c45b | 741 | /* If quo_est was high by one, then num[i] went negative and |
742 | we need to correct things. */ | |
f9a532b0 | 743 | if (num[num_hi_sig] < (HOST_WIDE_INT) carry) |
a0c2c45b | 744 | { |
745 | quo_est--; | |
746 | carry = 0; /* add divisor back in */ | |
747 | for (j = 0; j <= den_hi_sig; j++) | |
748 | { | |
749 | work = num[i + j] + den[j] + carry; | |
750 | carry = HIGHPART (work); | |
751 | num[i + j] = LOWPART (work); | |
752 | } | |
753 | ||
754 | num [num_hi_sig] += carry; | |
755 | } | |
756 | ||
757 | /* Store the quotient digit. */ | |
758 | quo[i] = quo_est; | |
759 | } | |
2bc77e10 | 760 | } |
2bc77e10 | 761 | |
762 | decode (quo, lquo, hquo); | |
763 | ||
764 | finish_up: | |
b4b174c3 | 765 | /* If result is negative, make it so. */ |
2bc77e10 | 766 | if (quo_neg) |
767 | neg_double (*lquo, *hquo, lquo, hquo); | |
768 | ||
aab2cf92 | 769 | /* Compute trial remainder: rem = num - (quo * den) */ |
2bc77e10 | 770 | mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem); |
771 | neg_double (*lrem, *hrem, lrem, hrem); | |
772 | add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem); | |
773 | ||
774 | switch (code) | |
775 | { | |
776 | case TRUNC_DIV_EXPR: | |
777 | case TRUNC_MOD_EXPR: /* round toward zero */ | |
778 | case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */ | |
b9e999f0 | 779 | return overflow; |
2bc77e10 | 780 | |
781 | case FLOOR_DIV_EXPR: | |
782 | case FLOOR_MOD_EXPR: /* round toward negative infinity */ | |
783 | if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */ | |
784 | { | |
785 | /* quo = quo - 1; */ | |
b572011e | 786 | add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, |
787 | lquo, hquo); | |
2bc77e10 | 788 | } |
a0c2c45b | 789 | else |
790 | return overflow; | |
2bc77e10 | 791 | break; |
792 | ||
793 | case CEIL_DIV_EXPR: | |
794 | case CEIL_MOD_EXPR: /* round toward positive infinity */ | |
795 | if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */ | |
796 | { | |
b572011e | 797 | add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0, |
798 | lquo, hquo); | |
2bc77e10 | 799 | } |
a0c2c45b | 800 | else |
801 | return overflow; | |
2bc77e10 | 802 | break; |
cc049fa3 | 803 | |
2bc77e10 | 804 | case ROUND_DIV_EXPR: |
805 | case ROUND_MOD_EXPR: /* round to closest integer */ | |
806 | { | |
a0c2c45b | 807 | unsigned HOST_WIDE_INT labs_rem = *lrem; |
808 | HOST_WIDE_INT habs_rem = *hrem; | |
809 | unsigned HOST_WIDE_INT labs_den = lden, ltwice; | |
810 | HOST_WIDE_INT habs_den = hden, htwice; | |
811 | ||
2358393e | 812 | /* Get absolute values. */ |
a0c2c45b | 813 | if (*hrem < 0) |
814 | neg_double (*lrem, *hrem, &labs_rem, &habs_rem); | |
815 | if (hden < 0) | |
816 | neg_double (lden, hden, &labs_den, &habs_den); | |
817 | ||
818 | /* If (2 * abs (lrem) >= abs (lden)) */ | |
b572011e | 819 | mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0, |
820 | labs_rem, habs_rem, <wice, &htwice); | |
a0c2c45b | 821 | |
b572011e | 822 | if (((unsigned HOST_WIDE_INT) habs_den |
823 | < (unsigned HOST_WIDE_INT) htwice) | |
824 | || (((unsigned HOST_WIDE_INT) habs_den | |
825 | == (unsigned HOST_WIDE_INT) htwice) | |
a0c2c45b | 826 | && (labs_den < ltwice))) |
2bc77e10 | 827 | { |
828 | if (*hquo < 0) | |
829 | /* quo = quo - 1; */ | |
b572011e | 830 | add_double (*lquo, *hquo, |
831 | (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo); | |
2bc77e10 | 832 | else |
833 | /* quo = quo + 1; */ | |
b572011e | 834 | add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0, |
835 | lquo, hquo); | |
2bc77e10 | 836 | } |
a0c2c45b | 837 | else |
838 | return overflow; | |
2bc77e10 | 839 | } |
840 | break; | |
841 | ||
842 | default: | |
fdada98f | 843 | gcc_unreachable (); |
2bc77e10 | 844 | } |
845 | ||
21dda4ee | 846 | /* Compute true remainder: rem = num - (quo * den) */ |
2bc77e10 | 847 | mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem); |
848 | neg_double (*lrem, *hrem, lrem, hrem); | |
849 | add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem); | |
b9e999f0 | 850 | return overflow; |
2bc77e10 | 851 | } |
86f023fe | 852 | |
853 | /* If ARG2 divides ARG1 with zero remainder, carries out the division | |
854 | of type CODE and returns the quotient. | |
855 | Otherwise returns NULL_TREE. */ | |
856 | ||
857 | static tree | |
858 | div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2) | |
859 | { | |
860 | unsigned HOST_WIDE_INT int1l, int2l; | |
861 | HOST_WIDE_INT int1h, int2h; | |
862 | unsigned HOST_WIDE_INT quol, reml; | |
863 | HOST_WIDE_INT quoh, remh; | |
864 | tree type = TREE_TYPE (arg1); | |
865 | int uns = TYPE_UNSIGNED (type); | |
866 | ||
867 | int1l = TREE_INT_CST_LOW (arg1); | |
868 | int1h = TREE_INT_CST_HIGH (arg1); | |
869 | int2l = TREE_INT_CST_LOW (arg2); | |
870 | int2h = TREE_INT_CST_HIGH (arg2); | |
871 | ||
872 | div_and_round_double (code, uns, int1l, int1h, int2l, int2h, | |
873 | &quol, &quoh, &reml, &remh); | |
874 | if (remh != 0 || reml != 0) | |
875 | return NULL_TREE; | |
876 | ||
877 | return build_int_cst_wide (type, quol, quoh); | |
878 | } | |
2bc77e10 | 879 | \f |
352e5c7a | 880 | /* Return true if the built-in mathematical function specified by CODE |
881 | is odd, i.e. -f(x) == f(-x). */ | |
bd214d13 | 882 | |
883 | static bool | |
884 | negate_mathfn_p (enum built_in_function code) | |
885 | { | |
886 | switch (code) | |
887 | { | |
4f35b1fc | 888 | CASE_FLT_FN (BUILT_IN_ASIN): |
889 | CASE_FLT_FN (BUILT_IN_ASINH): | |
890 | CASE_FLT_FN (BUILT_IN_ATAN): | |
891 | CASE_FLT_FN (BUILT_IN_ATANH): | |
892 | CASE_FLT_FN (BUILT_IN_CBRT): | |
893 | CASE_FLT_FN (BUILT_IN_SIN): | |
894 | CASE_FLT_FN (BUILT_IN_SINH): | |
895 | CASE_FLT_FN (BUILT_IN_TAN): | |
896 | CASE_FLT_FN (BUILT_IN_TANH): | |
bd214d13 | 897 | return true; |
898 | ||
899 | default: | |
900 | break; | |
901 | } | |
902 | return false; | |
903 | } | |
904 | ||
/* Check whether we may negate an integer constant T without causing
   overflow.  In two's complement, negation overflows exactly when T is
   the most negative value of its type, i.e. -2**(prec-1); this routine
   returns false for that value (and for all unsigned types).  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  /* Report unsigned constants as not safely negatable; callers are
     expected to handle unsigned negation (which wraps) themselves.  */
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* The value spans two host words.  The minimum value has an
	 all-zero low word, so any nonzero low bit rules it out.  */
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      /* Otherwise the decisive bit lives in the high word.  */
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  /* Mask off bits beyond the type's precision before comparing.  */
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  /* Safe unless T is exactly the minimum value (sign bit alone set).  */
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
935 | ||
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      /* Unsigned negation always wraps; with -fwrapv (and no -ftrapv)
	 signed negation is defined to wrap too.  */
      if (TYPE_UNSIGNED (type)
	  || (flag_wrapv && ! flag_trapv))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      /* -(~A) folds to A + 1 (see fold_negate_expr); that addition must
	 be allowed to wrap, hence the unsigned / -fwrapv condition.  */
       return INTEGRAL_TYPE_P (type)
       	      && (TYPE_UNSIGNED (type)
	      	  || (flag_wrapv && !flag_trapv));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      /* Negatable only if both the real and imaginary parts are.  */
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      /* For floats, -(A + B) -> (-B) - A is unsafe (signed zeros,
	 rounding) unless -funsafe-math-optimizations.  */
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* -(A * B) and -(A / B) can push the negation onto either
	 operand, unless the rounding mode is sign-dependent.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Integer division: moving the negation inside is not safe for
	 unsigned types or when signed overflow wraps (-fwrapv).  */
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x) for odd math builtins.  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  /* Only when the shift count equals precision - 1, i.e. the
	     result is 0 or -1 (sign-bit extraction).  */
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
1044 | ||
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      /* Keep the folded constant unless it overflowed and -ftrapv
	 requires the trap to happen at run time.  */
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return tem;
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	/* Only fold if both parts negated to constants.  */
	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      /* --A -> A.  */
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Push the negation onto whichever operand accepts it cheaply,
	 trying the second operand first.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Same operand-negation trick for integer division, guarded
	 against unsigned types and -fwrapv (see negate_expr_p).  */
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return negate_expr (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  /* Rebuild the call with the negated argument.  */
	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      /* Flip the signedness of the shift so the sign-bit
		 extraction yields 1 instead of -1 (or vice versa).  */
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
1214 | ||
1215 | /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be | |
1216 | negated in a simpler way. Also allow for T to be NULL_TREE, in which case | |
1217 | return NULL_TREE. */ | |
1218 | ||
1219 | static tree | |
1220 | negate_expr (tree t) | |
1221 | { | |
1222 | tree type, tem; | |
1223 | ||
1224 | if (t == NULL_TREE) | |
1225 | return NULL_TREE; | |
1226 | ||
1227 | type = TREE_TYPE (t); | |
1228 | STRIP_SIGN_NOPS (t); | |
1229 | ||
1230 | tem = fold_negate_expr (t); | |
1231 | if (!tem) | |
1232 | tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t); | |
b30e3dbc | 1233 | return fold_convert (type, tem); |
23ec2d5e | 1234 | } |
1235 | \f | |
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* For a MINUS_EXPR, the second operand is implicitly negated;
	 the neg_*_p flags track which extracted part inherits that.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      /* Negating the whole decomposition: a literal moves between
	 *LITP and *MINUS_LITP; the other parts are negated directly.  */
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
1330 | ||
1331 | /* Re-associate trees split by the above function. T1 and T2 are either | |
1332 | expressions to associate or null. Return the new expression, if any. If | |
b07ba9ff | 1333 | we build an operation, do it in TYPE and with CODE. */ |
23ec2d5e | 1334 | |
1335 | static tree | |
de1b648b | 1336 | associate_trees (tree t1, tree t2, enum tree_code code, tree type) |
23ec2d5e | 1337 | { |
23ec2d5e | 1338 | if (t1 == 0) |
1339 | return t2; | |
1340 | else if (t2 == 0) | |
1341 | return t1; | |
1342 | ||
23ec2d5e | 1343 | /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't |
1344 | try to fold this since we will have infinite recursion. But do | |
1345 | deal with any NEGATE_EXPRs. */ | |
1346 | if (TREE_CODE (t1) == code || TREE_CODE (t2) == code | |
1347 | || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR) | |
1348 | { | |
5a3fb4d3 | 1349 | if (code == PLUS_EXPR) |
1350 | { | |
1351 | if (TREE_CODE (t1) == NEGATE_EXPR) | |
fd96eeef | 1352 | return build2 (MINUS_EXPR, type, fold_convert (type, t2), |
1353 | fold_convert (type, TREE_OPERAND (t1, 0))); | |
5a3fb4d3 | 1354 | else if (TREE_CODE (t2) == NEGATE_EXPR) |
fd96eeef | 1355 | return build2 (MINUS_EXPR, type, fold_convert (type, t1), |
1356 | fold_convert (type, TREE_OPERAND (t2, 0))); | |
faab57e3 | 1357 | else if (integer_zerop (t2)) |
1358 | return fold_convert (type, t1); | |
5a3fb4d3 | 1359 | } |
faab57e3 | 1360 | else if (code == MINUS_EXPR) |
1361 | { | |
1362 | if (integer_zerop (t2)) | |
1363 | return fold_convert (type, t1); | |
1364 | } | |
1365 | ||
fd96eeef | 1366 | return build2 (code, type, fold_convert (type, t1), |
1367 | fold_convert (type, t2)); | |
23ec2d5e | 1368 | } |
1369 | ||
7ab7fd4f | 1370 | return fold_build2 (code, type, fold_convert (type, t1), |
1371 | fold_convert (type, t2)); | |
2bc77e10 | 1372 | } |
1373 | \f | |
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  /* Each operand is a double-word integer held as low/high words.  */
  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      /* A right shift is a left shift by a negated count.
	 ... fall through ...  */
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      /* Likewise, rotate right == rotate left by a negated count.
	 ... fall through ...  */
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      /* Subtraction is addition of the negation; detect overflow from
	 the sign pattern of the operands and result.  */
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      /* Division by zero is not foldable.  */
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      /* Here the quotient is the garbage output.  */
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      /* LOW temporarily holds the result of the comparison int1 < int2,
	 using the signedness of the operands.  */
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
1555 | ||
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases: the complex-constant path below
     re-enters const_binop with results that may be NULL_TREE.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* Integer constants are delegated wholesale to int_const_binop.  */
  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      /* real_arithmetic computes at full internal precision; real_convert
	 then rounds to the target mode, so INEXACT plus the identity check
	 below detect any double rounding.  */
      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      /* Propagate overflow flags from both operands onto the result.  */
      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  /* Complex addition/subtraction is component-wise.  */
	  real = const_binop (code, r1, r2, notrunc);
	  imag = const_binop (code, i1, i2, notrunc);
	  break;

	case MULT_EXPR:
	  /* (r1 + i1*i) * (r2 + i2*i)
	     = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      notrunc);
	  break;

	case RDIV_EXPR:
	  {
	    /* Textbook division: multiply by the conjugate and divide
	       each component by |r2 + i2*i|^2.  */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2, notrunc),
			     const_binop (MULT_EXPR, i1, i2, notrunc),
			     notrunc);
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2, notrunc),
			     const_binop (MULT_EXPR, r1, i2, notrunc),
			     notrunc);

	    /* For integral complex types RDIV_EXPR is not meaningful;
	       use truncating division for the final step.  */
	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      code = TRUNC_DIV_EXPR;

	    real = const_binop (code, t1, magsquared, notrunc);
	    imag = const_binop (code, t2, magsquared, notrunc);
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      /* Any of the recursive folds above may have failed (NULL_TREE);
	 only build the complex constant when both parts folded.  */
      if (real && imag)
	return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
15d769aa | 1723 | |
85390276 | 1724 | /* Create a size type INT_CST node with NUMBER sign extended. KIND |
1725 | indicates which particular sizetype to create. */ | |
083a2b5e | 1726 | |
902de8ed | 1727 | tree |
1e9d55d7 | 1728 | size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind) |
902de8ed | 1729 | { |
85390276 | 1730 | return build_int_cst (sizetype_tab[(int) kind], number); |
902de8ed | 1731 | } |
85390276 | 1732 | \f |
902de8ed | 1733 | /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE |
1734 | is a tree code. The type of the result is taken from the operands. | |
1735 | Both must be the same type integer type and it must be a size type. | |
2bc77e10 | 1736 | If the operands are constant, so is the result. */ |
1737 | ||
1738 | tree | |
de1b648b | 1739 | size_binop (enum tree_code code, tree arg0, tree arg1) |
2bc77e10 | 1740 | { |
902de8ed | 1741 | tree type = TREE_TYPE (arg0); |
1742 | ||
4a698d62 | 1743 | if (arg0 == error_mark_node || arg1 == error_mark_node) |
1744 | return error_mark_node; | |
1745 | ||
fdada98f | 1746 | gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type) |
1747 | && type == TREE_TYPE (arg1)); | |
902de8ed | 1748 | |
2bc77e10 | 1749 | /* Handle the special case of two integer constants faster. */ |
1750 | if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) | |
1751 | { | |
1752 | /* And some specific cases even faster than that. */ | |
a7baffe5 | 1753 | if (code == PLUS_EXPR && integer_zerop (arg0)) |
2bc77e10 | 1754 | return arg1; |
a7baffe5 | 1755 | else if ((code == MINUS_EXPR || code == PLUS_EXPR) |
1756 | && integer_zerop (arg1)) | |
2bc77e10 | 1757 | return arg0; |
a7baffe5 | 1758 | else if (code == MULT_EXPR && integer_onep (arg0)) |
2bc77e10 | 1759 | return arg1; |
a7baffe5 | 1760 | |
2bc77e10 | 1761 | /* Handle general case of two integer constants. */ |
15d769aa | 1762 | return int_const_binop (code, arg0, arg1, 0); |
2bc77e10 | 1763 | } |
1764 | ||
7ab7fd4f | 1765 | return fold_build2 (code, type, arg0, arg1); |
2bc77e10 | 1766 | } |
3fd3b688 | 1767 | |
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  /* CTYPE is the signed counterpart of the (unsigned) operand type.  */
  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    /* Negation is expressed as 0 - x so that the whole computation
       stays within size_binop's sizetype machinery.  */
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
2bc77e10 | 1807 | \f |
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
			  TREE_INT_CST_HIGH (arg1));

  /* The second overflow operand below flags the case where a value with
     the high word negative (i.e. looking negative in the source) is
     converted from an unsigned to a signed type — presumably because the
     value cannot be represented; confirm against force_fit_type's
     contract if modifying.  */
  t = force_fit_type (t,
		      /* Don't set the overflow when
			 converting a pointer  */
		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
		      (TREE_INT_CST_HIGH (arg1) < 0
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
1833 | ||
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  CODE selects the rounding direction
   (trunc/ceil/floor/round).  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  /* First round X to an integral value per CODE.  */
  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  On overflow, saturate to the violated bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      /* TYPE_MAX_VALUE may be absent; in that case no upper check.  */
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  /* In range: do the actual conversion.  */
  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
50c90ea2 | 1925 | |
b38d56be | 1926 | /* A subroutine of fold_convert_const handling conversions a REAL_CST |
1927 | to another floating point type. */ | |
04b253e8 | 1928 | |
b38d56be | 1929 | static tree |
1930 | fold_convert_const_real_from_real (tree type, tree arg1) | |
1931 | { | |
a47b9d79 | 1932 | REAL_VALUE_TYPE value; |
b38d56be | 1933 | tree t; |
f52483b5 | 1934 | |
a47b9d79 | 1935 | real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1)); |
1936 | t = build_real (type, value); | |
67c65562 | 1937 | |
b38d56be | 1938 | TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1); |
1939 | TREE_CONSTANT_OVERFLOW (t) | |
1940 | = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1); | |
1941 | return t; | |
1942 | } | |
67c65562 | 1943 | |
b38d56be | 1944 | /* Attempt to fold type conversion operation CODE of expression ARG1 to |
1945 | type TYPE. If no simplification can be done return NULL_TREE. */ | |
67c65562 | 1946 | |
b38d56be | 1947 | static tree |
1948 | fold_convert_const (enum tree_code code, tree type, tree arg1) | |
1949 | { | |
1950 | if (TREE_TYPE (arg1) == type) | |
1951 | return arg1; | |
4d28c5d1 | 1952 | |
b38d56be | 1953 | if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)) |
1954 | { | |
1955 | if (TREE_CODE (arg1) == INTEGER_CST) | |
1956 | return fold_convert_const_int_from_int (type, arg1); | |
1957 | else if (TREE_CODE (arg1) == REAL_CST) | |
1958 | return fold_convert_const_int_from_real (code, type, arg1); | |
2bc77e10 | 1959 | } |
1960 | else if (TREE_CODE (type) == REAL_TYPE) | |
1961 | { | |
2bc77e10 | 1962 | if (TREE_CODE (arg1) == INTEGER_CST) |
1963 | return build_real_from_int_cst (type, arg1); | |
2bc77e10 | 1964 | if (TREE_CODE (arg1) == REAL_CST) |
b38d56be | 1965 | return fold_convert_const_real_from_real (type, arg1); |
2bc77e10 | 1966 | } |
04b253e8 | 1967 | return NULL_TREE; |
2bc77e10 | 1968 | } |
b30e3dbc | 1969 | |
b38d56be | 1970 | /* Construct a vector of zero elements of vector type TYPE. */ |
1971 | ||
1972 | static tree | |
1973 | build_zero_vector (tree type) | |
1974 | { | |
1975 | tree elem, list; | |
1976 | int i, units; | |
1977 | ||
1978 | elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); | |
1979 | units = TYPE_VECTOR_SUBPARTS (type); | |
1980 | ||
1981 | list = NULL_TREE; | |
1982 | for (i = 0; i < units; i++) | |
1983 | list = tree_cons (NULL_TREE, elem, list); | |
1984 | return build_vector (type, list); | |
1985 | } | |
1986 | ||
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  /* Compatible types need only a NOP_EXPR wrapper.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  /* Otherwise dispatch on the target type, then on the source type.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      /* Fold constant conversions immediately when possible.  */
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  /* complex -> scalar drops the imaginary part.  */
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1 (NOP_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	  /* scalar -> complex: real part is the converted scalar,
	     imaginary part is zero.  */
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    /* If ARG is literally a COMPLEX_EXPR, convert each part
	       directly without materializing a temporary.  */
	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	      }

	    /* Otherwise wrap ARG in a SAVE_EXPR so it is evaluated
	       only once despite being referenced twice.  */
	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
2bc77e10 | 2112 | \f |
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    /* Declarations.  */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    /* Memory references.  */
    case COMPONENT_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    /* Expressions that may yield an lvalue.  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
	break;
      return false;
    }

  return true;
}
2163 | ||
2164 | /* Return an expr equal to X but certainly not valid as an lvalue. */ | |
2165 | ||
2166 | tree | |
2167 | non_lvalue (tree x) | |
2168 | { | |
2169 | /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to | |
2170 | us. */ | |
2171 | if (in_gimple_form) | |
2172 | return x; | |
2173 | ||
2174 | if (! maybe_lvalue_p (x)) | |
2175 | return x; | |
4ee9c684 | 2176 | return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x); |
2bc77e10 | 2177 | } |
56753054 | 2178 | |
b12c26dc | 2179 | /* Nonzero means lvalues are limited to those valid in pedantic ANSI C. |
2180 | Zero means allow extended lvalues. */ | |
2181 | ||
2182 | int pedantic_lvalues; | |
2183 | ||
56753054 | 2184 | /* When pedantic, return an expr equal to X but certainly not valid as a |
2185 | pedantic lvalue. Otherwise, return X. */ | |
2186 | ||
d50efa49 | 2187 | static tree |
de1b648b | 2188 | pedantic_non_lvalue (tree x) |
56753054 | 2189 | { |
b12c26dc | 2190 | if (pedantic_lvalues) |
56753054 | 2191 | return non_lvalue (x); |
2192 | else | |
2193 | return x; | |
2194 | } | |
e233264a | 2195 | \f |
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  /* With NaNs and trapping math, the inverse of an ordered comparison
     is an unordered one, which traps differently — refuse to invert.  */
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    /* For the ordered comparisons, the true inverse in the presence of
       NaNs is the corresponding unordered comparison.  */
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
2241 | ||
2242 | /* Similar, but return the comparison that results if the operands are | |
2243 | swapped. This is safe for floating-point. */ | |
2244 | ||
cc0bdf91 | 2245 | enum tree_code |
de1b648b | 2246 | swap_tree_comparison (enum tree_code code) |
e233264a | 2247 | { |
2248 | switch (code) | |
2249 | { | |
2250 | case EQ_EXPR: | |
2251 | case NE_EXPR: | |
6a0aeeaa | 2252 | case ORDERED_EXPR: |
2253 | case UNORDERED_EXPR: | |
2254 | case LTGT_EXPR: | |
2255 | case UNEQ_EXPR: | |
e233264a | 2256 | return code; |
2257 | case GT_EXPR: | |
2258 | return LT_EXPR; | |
2259 | case GE_EXPR: | |
2260 | return LE_EXPR; | |
2261 | case LT_EXPR: | |
2262 | return GT_EXPR; | |
2263 | case LE_EXPR: | |
2264 | return GE_EXPR; | |
6a0aeeaa | 2265 | case UNGT_EXPR: |
2266 | return UNLT_EXPR; | |
2267 | case UNGE_EXPR: | |
2268 | return UNLE_EXPR; | |
2269 | case UNLT_EXPR: | |
2270 | return UNGT_EXPR; | |
2271 | case UNLE_EXPR: | |
2272 | return UNGE_EXPR; | |
e233264a | 2273 | default: |
fdada98f | 2274 | gcc_unreachable (); |
e233264a | 2275 | } |
2276 | } | |
8b94828f | 2277 | |
7835f163 | 2278 | |
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      /* Non-comparison codes have no compcode encoding.  */
      gcc_unreachable ();
    }
}
2320 | ||
2321 | /* Convert a compcode bit-based encoding of a comparison operator back | |
2322 | to GCC's enum tree_code representation. This function is the | |
2323 | inverse of comparison_to_compcode. */ | |
2324 | ||
2325 | static enum tree_code | |
318a728f | 2326 | compcode_to_comparison (enum comparison_code code) |
7835f163 | 2327 | { |
2328 | switch (code) | |
2329 | { | |
2330 | case COMPCODE_LT: | |
2331 | return LT_EXPR; | |
2332 | case COMPCODE_EQ: | |
2333 | return EQ_EXPR; | |
2334 | case COMPCODE_LE: | |
2335 | return LE_EXPR; | |
2336 | case COMPCODE_GT: | |
2337 | return GT_EXPR; | |
2338 | case COMPCODE_NE: | |
2339 | return NE_EXPR; | |
2340 | case COMPCODE_GE: | |
2341 | return GE_EXPR; | |
318a728f | 2342 | case COMPCODE_ORD: |
2343 | return ORDERED_EXPR; | |
2344 | case COMPCODE_UNORD: | |
2345 | return UNORDERED_EXPR; | |
2346 | case COMPCODE_UNLT: | |
2347 | return UNLT_EXPR; | |
2348 | case COMPCODE_UNEQ: | |
2349 | return UNEQ_EXPR; | |
2350 | case COMPCODE_UNLE: | |
2351 | return UNLE_EXPR; | |
2352 | case COMPCODE_UNGT: | |
2353 | return UNGT_EXPR; | |
2354 | case COMPCODE_LTGT: | |
2355 | return LTGT_EXPR; | |
2356 | case COMPCODE_UNGE: | |
2357 | return UNGE_EXPR; | |
7835f163 | 2358 | default: |
fdada98f | 2359 | gcc_unreachable (); |
7835f163 | 2360 | } |
2361 | } | |
2362 | ||
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  /* In the bit-based encoding, AND/OR of the predicates is simply
     bitwise AND/OR of their compcodes.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
   else if (flag_trapping_math)
     {
	/* Check that the original operation and the optimized ones will trap
	   under the same condition.  */
	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		     && (lcompcode != COMPCODE_EQ)
		     && (lcompcode != COMPCODE_ORD);
	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		     && (rcompcode != COMPCODE_EQ)
		     && (rcompcode != COMPCODE_ORD);
	bool trap = (compcode & COMPCODE_UNORD) == 0
		    && (compcode != COMPCODE_EQ)
		    && (compcode != COMPCODE_ORD);

        /* In a short-circuited boolean expression the LHS might be
	   such that the RHS, if evaluated, will never trap.  For
	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
	   if neither x nor y is NaN.  (This is a mixed blessing: for
	   example, the expression above will never trap, hence
	   optimizing it to x < y would be invalid).  */
        if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
            || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
          rtrap = false;

        /* If the comparison was short-circuited, and only the RHS
	   trapped, we may now generate a spurious trap.  */
	if (rtrap && !ltrap
	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	  return NULL_TREE;

	/* If we changed the conditions that cause a trap, we lose.  */
	if ((ltrap || rtrap) != trap)
	  return NULL_TREE;
      }

  /* COMPCODE_TRUE/FALSE have no tree_code form; emit constants.  */
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
			truth_type, ll_arg, lr_arg);
}
2446 | ||
8b94828f | 2447 | /* Return nonzero if CODE is a tree code that represents a truth value. */ |
2448 | ||
2449 | static int | |
de1b648b | 2450 | truth_value_p (enum tree_code code) |
8b94828f | 2451 | { |
ce45a448 | 2452 | return (TREE_CODE_CLASS (code) == tcc_comparison |
8b94828f | 2453 | || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR |
2454 | || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR | |
2455 | || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR); | |
2456 | } | |
e233264a | 2457 | \f |
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  /* Remove no-op conversions so structurally different but equivalent
     trees compare equal.  Safe because of the two checks above.  */
  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  Constants with overflow set are never
     considered equal, to be conservative.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case VECTOR_CST:
	{
	  tree v1, v2;

	  if (TREE_CONSTANT_OVERFLOW (arg0)
	      || TREE_CONSTANT_OVERFLOW (arg1))
	    return 0;

	  /* Walk both element lists in parallel; all corresponding
	     elements must compare equal.  */
	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  /* Equal only if both lists were exhausted together.  */
	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, the both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case FIX_CEIL_EXPR:
	case FIX_TRUNC_EXPR:
	case FIX_FLOOR_EXPR:
	case FIX_ROUND_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);


    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.  */
	  return (OP_SAME (0)
		  && OP_SAME (1)
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  /* Short-circuit operators are not commutative: operand order
	     matters because of side effects and traps.  */
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (!OP_SAME (0))
	    return 0;

	  {
	    /* Only const (and, with OEP_PURE_SAME, pure) calls can be
	       equal: anything else may depend on or change global state.  */
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  operand_equal_p
	     does not handle TREE_LIST, so we walk the operands here
	     feeding them to operand_equal_p.  */
	  arg0 = TREE_OPERAND (arg0, 1);
	  arg1 = TREE_OPERAND (arg1, 1);
	  while (arg0 && arg1)
	    {
	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
				     flags))
		return 0;

	      arg0 = TREE_CHAIN (arg0);
	      arg1 = TREE_CHAIN (arg1);
	    }

	  /* If we get here and both argument lists are exhausted
	     then the CALL_EXPRs are equal.  */
	  return ! (arg0 || arg1);

	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
e233264a | 2761 | \f |
2762 | /* Similar to operand_equal_p, but see if ARG0 might have been made by | |
cc049fa3 | 2763 | shorten_compare from ARG1 when ARG1 was being compared with OTHER. |
2bc77e10 | 2764 | |
2bc77e10 | 2765 | When in doubt, return 0. */ |
2766 | ||
cc049fa3 | 2767 | static int |
de1b648b | 2768 | operand_equal_for_comparison_p (tree arg0, tree arg1, tree other) |
2bc77e10 | 2769 | { |
e233264a | 2770 | int unsignedp1, unsignedpo; |
df7caa7b | 2771 | tree primarg0, primarg1, primother; |
02e7a332 | 2772 | unsigned int correct_width; |
2bc77e10 | 2773 | |
e233264a | 2774 | if (operand_equal_p (arg0, arg1, 0)) |
2bc77e10 | 2775 | return 1; |
2776 | ||
154e6f12 | 2777 | if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)) |
2778 | || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1))) | |
2bc77e10 | 2779 | return 0; |
2780 | ||
df7caa7b | 2781 | /* Discard any conversions that don't change the modes of ARG0 and ARG1 |
2782 | and see if the inner values are the same. This removes any | |
2783 | signedness comparison, which doesn't matter here. */ | |
2784 | primarg0 = arg0, primarg1 = arg1; | |
cc049fa3 | 2785 | STRIP_NOPS (primarg0); |
2786 | STRIP_NOPS (primarg1); | |
df7caa7b | 2787 | if (operand_equal_p (primarg0, primarg1, 0)) |
2788 | return 1; | |
2789 | ||
e233264a | 2790 | /* Duplicate what shorten_compare does to ARG1 and see if that gives the |
2791 | actual comparison operand, ARG0. | |
2bc77e10 | 2792 | |
e233264a | 2793 | First throw away any conversions to wider types |
2bc77e10 | 2794 | already present in the operands. */ |
2bc77e10 | 2795 | |
e233264a | 2796 | primarg1 = get_narrower (arg1, &unsignedp1); |
2797 | primother = get_narrower (other, &unsignedpo); | |
2798 | ||
2799 | correct_width = TYPE_PRECISION (TREE_TYPE (arg1)); | |
2800 | if (unsignedp1 == unsignedpo | |
2801 | && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width | |
2802 | && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width) | |
2bc77e10 | 2803 | { |
e233264a | 2804 | tree type = TREE_TYPE (arg0); |
2bc77e10 | 2805 | |
2806 | /* Make sure shorter operand is extended the right way | |
2807 | to match the longer operand. */ | |
fa8b888f | 2808 | primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type |
b30e3dbc | 2809 | (unsignedp1, TREE_TYPE (primarg1)), primarg1); |
2bc77e10 | 2810 | |
b30e3dbc | 2811 | if (operand_equal_p (arg0, fold_convert (type, primarg1), 0)) |
2bc77e10 | 2812 | return 1; |
2813 | } | |
2814 | ||
2815 | return 0; | |
2816 | } | |
2817 | \f | |
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  Reclassify
     them so the switch below treats them as unary/binary operations
     over their operands.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      /* Look through the SAVE_EXPR; the caller must re-wrap CVAL1 and
	 CVAL2 in save_exprs, hence *SAVE_P.  */
      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      /* Constants never contribute comparison operands.  */
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      /* Record operand 0 in whichever of *CVAL1/*CVAL2 is free, or
	 accept it if it matches one already recorded.  */
      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      /* Likewise for operand 1.  */
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
2914 | \f | |
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  Reclassify
     them so they are rebuilt operand-by-operand below.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  /* The value of a COMPOUND_EXPR is its second operand.  */
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold_build3 (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  Unhandled tcc_expression codes are treated
	 like comparisons below, substituting into their two operands.  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2 (code, type, arg0, arg1);
      }

    default:
      /* Anything else (constants, declarations) is returned unchanged.  */
      return arg;
    }
}
2996 | \f | |
2bc77e10 | 2997 | /* Return a tree for the case when the result of an expression is RESULT |
2998 | converted to TYPE and OMITTED was previously an operand of the expression | |
2999 | but is now not needed (e.g., we folded OMITTED * 0). | |
3000 | ||
3001 | If OMITTED has side effects, we must evaluate it. Otherwise, just do | |
3002 | the conversion of RESULT to TYPE. */ | |
3003 | ||
e9f80ff5 | 3004 | tree |
de1b648b | 3005 | omit_one_operand (tree type, tree result, tree omitted) |
2bc77e10 | 3006 | { |
b30e3dbc | 3007 | tree t = fold_convert (type, result); |
2bc77e10 | 3008 | |
3009 | if (TREE_SIDE_EFFECTS (omitted)) | |
db97ad41 | 3010 | return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t); |
2bc77e10 | 3011 | |
c3ce5d04 | 3012 | return non_lvalue (t); |
2bc77e10 | 3013 | } |
6df5edfa | 3014 | |
3015 | /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */ | |
3016 | ||
3017 | static tree | |
de1b648b | 3018 | pedantic_omit_one_operand (tree type, tree result, tree omitted) |
6df5edfa | 3019 | { |
b30e3dbc | 3020 | tree t = fold_convert (type, result); |
6df5edfa | 3021 | |
3022 | if (TREE_SIDE_EFFECTS (omitted)) | |
db97ad41 | 3023 | return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t); |
6df5edfa | 3024 | |
3025 | return pedantic_non_lvalue (t); | |
3026 | } | |
9bc9f15f | 3027 | |
3028 | /* Return a tree for the case when the result of an expression is RESULT | |
3029 | converted to TYPE and OMITTED1 and OMITTED2 were previously operands | |
3030 | of the expression but are now not needed. | |
3031 | ||
3032 | If OMITTED1 or OMITTED2 has side effects, they must be evaluated. | |
3033 | If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is | |
3034 | evaluated before OMITTED2. Otherwise, if neither has side effects, | |
3035 | just do the conversion of RESULT to TYPE. */ | |
3036 | ||
3037 | tree | |
3038 | omit_two_operands (tree type, tree result, tree omitted1, tree omitted2) | |
3039 | { | |
3040 | tree t = fold_convert (type, result); | |
3041 | ||
3042 | if (TREE_SIDE_EFFECTS (omitted2)) | |
3043 | t = build2 (COMPOUND_EXPR, type, omitted2, t); | |
3044 | if (TREE_SIDE_EFFECTS (omitted1)) | |
3045 | t = build2 (COMPOUND_EXPR, type, omitted1, t); | |
3046 | ||
3047 | return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t; | |
3048 | } | |
3049 | ||
2bc77e10 | 3050 | \f |
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   Returns NULL_TREE if no simplification is possible.

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
fold_truth_not_expr (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      /* With -ftrapping-math, inverting an ordered FP comparison into
	 an unordered one (or vice versa) would change which inputs
	 trap, so punt.  */
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;
      else
	{
	  code = invert_tree_comparison (code,
					 HONOR_NANS (TYPE_MODE (op_type)));
	  if (code == ERROR_MARK)
	    return NULL_TREE;
	  else
	    return build2 (code, type,
			   TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
	}
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a & b) == !a | !b.  */
      return build2 (TRUTH_OR_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a | b) == !a & !b.  */
      return build2 (TRUTH_AND_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2 (TRUTH_XOR_EXPR, type,
		       invert_truthvalue (TREE_OPERAND (arg, 0)),
		       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);
	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		       VOID_TYPE_P (TREE_TYPE (arg1))
		       ? arg1 : invert_truthvalue (arg1),
		       VOID_TYPE_P (TREE_TYPE (arg2))
		       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      /* Only the value operand (the second) needs inverting.  */
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      /* A boolean-typed NOP_EXPR is just wrapped in a TRUTH_NOT_EXPR;
	 otherwise fall through and treat it like a conversion.  */
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      /* FALLTHROUGH */

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      /* Push the negation inside the conversion.  */
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* !(x & 1) == (x == 0) for a truth value x.  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build2 (EQ_EXPR, type, arg,
		     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  /* No simplification found.  */
  return NULL_TREE;
}
3179 | ||
3180 | /* Return a simplified tree node for the truth-negation of ARG. This | |
3181 | never alters ARG itself. We assume that ARG is an operation that | |
3182 | returns a truth value (0 or 1). | |
3183 | ||
3184 | FIXME: one would think we would fold the result, but it causes | |
3185 | problems with the dominator optimizer. */ | |
3186 | ||
3187 | tree | |
3188 | invert_truthvalue (tree arg) | |
3189 | { | |
3190 | tree tem; | |
3191 | ||
3192 | if (TREE_CODE (arg) == ERROR_MARK) | |
3193 | return arg; | |
3194 | ||
3195 | tem = fold_truth_not_expr (arg); | |
3196 | if (!tem) | |
3197 | tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg); | |
3198 | ||
3199 | return tem; | |
2bc77e10 | 3200 | } |
3201 | ||
3202 | /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both | |
3203 | operands are another bit-wise operation with a common input. If so, | |
3204 | distribute the bit operations to save an operation and possibly two if | |
3205 | constants are involved. For example, convert | |
de1b648b | 3206 | (A | B) & (A | C) into A | (B & C) |
2bc77e10 | 3207 | Further simplification will occur if B and C are constants. |
3208 | ||
3209 | If this optimization cannot be done, 0 will be returned. */ | |
3210 | ||
3211 | static tree | |
de1b648b | 3212 | distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1) |
2bc77e10 | 3213 | { |
3214 | tree common; | |
3215 | tree left, right; | |
3216 | ||
3217 | if (TREE_CODE (arg0) != TREE_CODE (arg1) | |
3218 | || TREE_CODE (arg0) == code | |
5b1de181 | 3219 | || (TREE_CODE (arg0) != BIT_AND_EXPR |
3220 | && TREE_CODE (arg0) != BIT_IOR_EXPR)) | |
2bc77e10 | 3221 | return 0; |
3222 | ||
3223 | if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)) | |
3224 | { | |
3225 | common = TREE_OPERAND (arg0, 0); | |
3226 | left = TREE_OPERAND (arg0, 1); | |
3227 | right = TREE_OPERAND (arg1, 1); | |
3228 | } | |
3229 | else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0)) | |
3230 | { | |
3231 | common = TREE_OPERAND (arg0, 0); | |
3232 | left = TREE_OPERAND (arg0, 1); | |
3233 | right = TREE_OPERAND (arg1, 0); | |
3234 | } | |
3235 | else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0)) | |
3236 | { | |
3237 | common = TREE_OPERAND (arg0, 1); | |
3238 | left = TREE_OPERAND (arg0, 0); | |
3239 | right = TREE_OPERAND (arg1, 1); | |
3240 | } | |
3241 | else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0)) | |
3242 | { | |
3243 | common = TREE_OPERAND (arg0, 1); | |
3244 | left = TREE_OPERAND (arg0, 0); | |
3245 | right = TREE_OPERAND (arg1, 0); | |
3246 | } | |
3247 | else | |
3248 | return 0; | |
3249 | ||
7ab7fd4f | 3250 | return fold_build2 (TREE_CODE (arg0), type, common, |
3251 | fold_build2 (code, type, left, right)); | |
2bc77e10 | 3252 | } |
429f2f90 | 3253 | |
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs or MULT_EXPRs (the
   mul0/mul1 flags below record which form each one takes), simplify a
   binary operation with code CODE.  This optimization is unsafe: it
   reassociates floating-point arithmetic, so the result may round
   differently from the original expression.  */
static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  /* Whether each operand is a multiplication; otherwise it is treated
     as a division.  */
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  Both operands must use the
     same operator and share the same second operand C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
			fold_build2 (code, type,
				     TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0)),
			TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  Only done when both
     C1 and C2 are REAL_CSTs, so the combined factor folds to a single
     constant at compile time.  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      /* For a division, replace the constant by its reciprocal so both
	 operands are in multiply-by-constant form.  */
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      /* Combine the two constants with CODE, then multiply A by it.  */
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
			  TREE_OPERAND (arg0, 0),
			  build_real (type, r0));
    }

  return NULL_TREE;
}
2bc77e10 | 3293 | \f |
3294 | /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER | |
6ef828f9 | 3295 | starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */ |
2bc77e10 | 3296 | |
3297 | static tree | |
dc81944a | 3298 | make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos, |
3299 | int unsignedp) | |
2bc77e10 | 3300 | { |
85453a57 | 3301 | tree result; |
3302 | ||
3303 | if (bitpos == 0) | |
3304 | { | |
3305 | tree size = TYPE_SIZE (TREE_TYPE (inner)); | |
3306 | if ((INTEGRAL_TYPE_P (TREE_TYPE (inner)) | |
3307 | || POINTER_TYPE_P (TREE_TYPE (inner))) | |
3308 | && host_integerp (size, 0) | |
3309 | && tree_low_cst (size, 0) == bitsize) | |
3310 | return fold_convert (type, inner); | |
3311 | } | |
3312 | ||
3313 | result = build3 (BIT_FIELD_REF, type, inner, | |
3314 | size_int (bitsize), bitsize_int (bitpos)); | |
2bc77e10 | 3315 | |
86ae60fd | 3316 | BIT_FIELD_REF_UNSIGNED (result) = unsignedp; |
2bc77e10 | 3317 | |
3318 | return result; | |
3319 | } | |
3320 | ||
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
			    tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
	sizes, and signedness are the same.  */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				   &runsignedp, &rvolatilep, false);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	 || lunsignedp != runsignedp || offset != 0
	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it. If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  /* The shift amounts below assume little-endian bit numbering; flip the
     in-word position on big-endian targets.  */
  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: LBITSIZE ones
     positioned at LBITPOS within the NBITSIZE-wide chunk.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
		   build2 (BIT_AND_EXPR, unsigned_type,
			   make_bit_field_ref (linner, unsigned_type,
					       nbitsize, nbitpos, 1),
			   mask),
		   build2 (BIT_AND_EXPR, unsigned_type,
			   make_bit_field_ref (rinner, unsigned_type,
					       nbitsize, nbitpos, 1),
			   mask));

  /* Otherwise, we are handling the constant case. See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      /* Preserve the volatility of the original access on the new
	 reference.  */
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert (unsigned_type, rhs),
				  size_int (lbitpos), 0),
		     mask, 0);

  /* Emit (CHUNK & MASK) CODE SHIFTED-CONSTANT.  */
  return build2 (code, compare_type,
		 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
		 rhs);
}
3487 | \f | |
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  /* Peel off an explicit masking operation; its constant operand is
     remembered in AND_MASK and merged into *PMASK below.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  /* Give up when no extraction took place (unless a BIT_AND_EXPR already
     restricted the bits), on variable offsets, and on PLACEHOLDER_EXPRs,
     which could not be replaced later.  */
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness. Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  /* Build *PBITSIZE low-order one bits by shifting an all-ones constant
     up and back down.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
			fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
3579 | ||
6ef828f9 | 3580 | /* Return nonzero if MASK represents a mask of SIZE ones in the low-order |
2bc77e10 | 3581 | bit positions. */ |
3582 | ||
3583 | static int | |
de1b648b | 3584 | all_ones_mask_p (tree mask, int size) |
2bc77e10 | 3585 | { |
3586 | tree type = TREE_TYPE (mask); | |
02e7a332 | 3587 | unsigned int precision = TYPE_PRECISION (type); |
52a49c7c | 3588 | tree tmask; |
2bc77e10 | 3589 | |
7016c612 | 3590 | tmask = build_int_cst (lang_hooks.types.signed_type (type), -1); |
4d28c5d1 | 3591 | tmask = force_fit_type (tmask, 0, false, false); |
0c5713a2 | 3592 | |
2bc77e10 | 3593 | return |
cc049fa3 | 3594 | tree_int_cst_equal (mask, |
94f29e88 | 3595 | const_binop (RSHIFT_EXPR, |
3596 | const_binop (LSHIFT_EXPR, tmask, | |
3597 | size_int (precision - size), | |
3598 | 0), | |
3599 | size_int (precision - size), 0)); | |
2bc77e10 | 3600 | } |
79109eec | 3601 | |
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  /* LO/HI form the double-word value of the sign bit; MASK_LO/MASK_HI
     cover the WIDTH significant bits of the type.  */
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      /* The sign bit falls in the high word of the two-word constant.  */
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      /* The sign bit fits in the low word; the high word is unused.  */
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type: recurse on the operand so
     VAL is also checked against the unextended type's sign bit.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
3659 | ||
79109eec | 3660 | /* Subroutine for fold_truthop: determine if an operand is simple enough |
3661 | to be evaluated unconditionally. */ | |
3662 | ||
cc049fa3 | 3663 | static int |
de1b648b | 3664 | simple_operand_p (tree exp) |
79109eec | 3665 | { |
3666 | /* Strip any conversions that don't change the machine mode. */ | |
9a73db25 | 3667 | STRIP_NOPS (exp); |
79109eec | 3668 | |
ce45a448 | 3669 | return (CONSTANT_CLASS_P (exp) |
9a73db25 | 3670 | || TREE_CODE (exp) == SSA_NAME |
9308e976 | 3671 | || (DECL_P (exp) |
79109eec | 3672 | && ! TREE_ADDRESSABLE (exp) |
3673 | && ! TREE_THIS_VOLATILE (exp) | |
7735dddb | 3674 | && ! DECL_NONLOCAL (exp) |
3675 | /* Don't regard global variables as simple. They may be | |
3676 | allocated in ways unknown to the compiler (shared memory, | |
3677 | #pragma weak, etc). */ | |
3678 | && ! TREE_PUBLIC (exp) | |
3679 | && ! DECL_EXTERNAL (exp) | |
3680 | /* Loading a static variable is unduly expensive, but global | |
3681 | registers aren't expensive. */ | |
3682 | && (! TREE_STATIC (exp) || DECL_REGISTER (exp)))); | |
79109eec | 3683 | } |
2bc77e10 | 3684 | \f |
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules. Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      /* Both bounds are finite: fold the operation directly; a
	 non-constant result means we could not decide, so return 0.  */
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same. But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number. This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
cc049fa3 | 3778 | \f |
12ec0a8a | 3779 | /* Given EXP, a logical expression, set the range it is testing into |
3780 | variables denoted by PIN_P, PLOW, and PHIGH. Return the expression | |
62af9abe | 3781 | actually being tested. *PLOW and *PHIGH will be made of the same type |
12ec0a8a | 3782 | as the returned expression. If EXP is not a comparison, we will most |
3783 | likely not be returning a useful value and range. */ | |
6f725368 | 3784 | |
bfd67d2c | 3785 | static tree |
de1b648b | 3786 | make_range (tree exp, int *pin_p, tree *plow, tree *phigh) |
6f725368 | 3787 | { |
12ec0a8a | 3788 | enum tree_code code; |
7206da1b | 3789 | tree arg0 = NULL_TREE, arg1 = NULL_TREE; |
3790 | tree exp_type = NULL_TREE, arg0_type = NULL_TREE; | |
12ec0a8a | 3791 | int in_p, n_in_p; |
3792 | tree low, high, n_low, n_high; | |
6f725368 | 3793 | |
12ec0a8a | 3794 | /* Start with simply saying "EXP != 0" and then look at the code of EXP |
3795 | and see if we can refine the range. Some of the cases below may not | |
3796 | happen, but it doesn't seem worth worrying about this. We "continue" | |
3797 | the outer loop when we've changed something; otherwise we "break" | |
3798 | the switch, which will "break" the while. */ | |
6f725368 | 3799 | |
b30e3dbc | 3800 | in_p = 0; |
3c6185f1 | 3801 | low = high = build_int_cst (TREE_TYPE (exp), 0); |
12ec0a8a | 3802 | |
3803 | while (1) | |
6f725368 | 3804 | { |
12ec0a8a | 3805 | code = TREE_CODE (exp); |
7206da1b | 3806 | exp_type = TREE_TYPE (exp); |
5eb945de | 3807 | |
3808 | if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))) | |
3809 | { | |
651396d6 | 3810 | if (TREE_CODE_LENGTH (code) > 0) |
13795292 | 3811 | arg0 = TREE_OPERAND (exp, 0); |
ce45a448 | 3812 | if (TREE_CODE_CLASS (code) == tcc_comparison |
3813 | || TREE_CODE_CLASS (code) == tcc_unary | |
3814 | || TREE_CODE_CLASS (code) == tcc_binary) | |
7206da1b | 3815 | arg0_type = TREE_TYPE (arg0); |
ce45a448 | 3816 | if (TREE_CODE_CLASS (code) == tcc_binary |
3817 | || TREE_CODE_CLASS (code) == tcc_comparison | |
3818 | || (TREE_CODE_CLASS (code) == tcc_expression | |
3f1e707c | 3819 | && TREE_CODE_LENGTH (code) > 1)) |
5eb945de | 3820 | arg1 = TREE_OPERAND (exp, 1); |
3821 | } | |
6f725368 | 3822 | |
12ec0a8a | 3823 | switch (code) |
3824 | { | |
3825 | case TRUTH_NOT_EXPR: | |
3826 | in_p = ! in_p, exp = arg0; | |
3827 | continue; | |
3828 | ||
3829 | case EQ_EXPR: case NE_EXPR: | |
3830 | case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR: | |
3831 | /* We can only do something if the range is testing for zero | |
3832 | and if the second operand is an integer constant. Note that | |
3833 | saying something is "in" the range we make is done by | |
3834 | complementing IN_P since it will set in the initial case of | |
3835 | being not equal to zero; "out" is leaving it alone. */ | |
3836 | if (low == 0 || high == 0 | |
3837 | || ! integer_zerop (low) || ! integer_zerop (high) | |
3838 | || TREE_CODE (arg1) != INTEGER_CST) | |
3839 | break; | |
6f725368 | 3840 | |
12ec0a8a | 3841 | switch (code) |
3842 | { | |
3843 | case NE_EXPR: /* - [c, c] */ | |
3844 | low = high = arg1; | |
3845 | break; | |
3846 | case EQ_EXPR: /* + [c, c] */ | |
3847 | in_p = ! in_p, low = high = arg1; | |
3848 | break; | |
3849 | case GT_EXPR: /* - [-, c] */ | |
3850 | low = 0, high = arg1; | |
3851 | break; | |
3852 | case GE_EXPR: /* + [c, -] */ | |
3853 | in_p = ! in_p, low = arg1, high = 0; | |
3854 | break; | |
3855 | case LT_EXPR: /* - [c, -] */ | |
3856 | low = arg1, high = 0; | |
3857 | break; | |
3858 | case LE_EXPR: /* + [-, c] */ | |
3859 | in_p = ! in_p, low = 0, high = arg1; | |
3860 | break; | |
0dbd1c74 | 3861 | default: |
fdada98f | 3862 | gcc_unreachable (); |
12ec0a8a | 3863 | } |
6f725368 | 3864 | |
c317c285 | 3865 | /* If this is an unsigned comparison, we also know that EXP is |
a9e29e86 | 3866 | greater than or equal to zero. We base the range tests we make |
3867 | on that fact, so we record it here so we can parse existing | |
7206da1b | 3868 | range tests. We test arg0_type since often the return type |
3869 | of, e.g. EQ_EXPR, is boolean. */ | |
3870 | if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0)) | |
12ec0a8a | 3871 | { |
5c9198bd | 3872 | if (! merge_ranges (&n_in_p, &n_low, &n_high, |
3873 | in_p, low, high, 1, | |
3c6185f1 | 3874 | build_int_cst (arg0_type, 0), |
a9e29e86 | 3875 | NULL_TREE)) |
12ec0a8a | 3876 | break; |
6f725368 | 3877 | |
12ec0a8a | 3878 | in_p = n_in_p, low = n_low, high = n_high; |
a9e29e86 | 3879 | |
751e10d1 | 3880 | /* If the high bound is missing, but we have a nonzero low |
e524954a | 3881 | bound, reverse the range so it goes from zero to the low bound |
3882 | minus 1. */ | |
3883 | if (high == 0 && low && ! integer_zerop (low)) | |
a9e29e86 | 3884 | { |
3885 | in_p = ! in_p; | |
3886 | high = range_binop (MINUS_EXPR, NULL_TREE, low, 0, | |
3887 | integer_one_node, 0); | |
3c6185f1 | 3888 | low = build_int_cst (arg0_type, 0); |
a9e29e86 | 3889 | } |
12ec0a8a | 3890 | } |
7206da1b | 3891 | |
3892 | exp = arg0; | |
12ec0a8a | 3893 | continue; |
3894 | ||
3895 | case NEGATE_EXPR: | |
3896 | /* (-x) IN [a,b] -> x in [-b, -a] */ | |
7206da1b | 3897 | n_low = range_binop (MINUS_EXPR, exp_type, |
3c6185f1 | 3898 | build_int_cst (exp_type, 0), |
b30e3dbc | 3899 | 0, high, 1); |
7206da1b | 3900 | n_high = range_binop (MINUS_EXPR, exp_type, |
3c6185f1 | 3901 | build_int_cst (exp_type, 0), |
b30e3dbc | 3902 | 0, low, 0); |
12ec0a8a | 3903 | low = n_low, high = n_high; |
3904 | exp = arg0; | |
3905 | continue; | |
3906 | ||
3907 | case BIT_NOT_EXPR: | |
3908 | /* ~ X -> -X - 1 */ | |
7206da1b | 3909 | exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0), |
3c6185f1 | 3910 | build_int_cst (exp_type, 1)); |
12ec0a8a | 3911 | continue; |
3912 | ||
3913 | case PLUS_EXPR: case MINUS_EXPR: | |
3914 | if (TREE_CODE (arg1) != INTEGER_CST) | |
3915 | break; | |
3916 | ||
0a8b4135 | 3917 | /* If flag_wrapv and ARG0_TYPE is signed, then we cannot |
3918 | move a constant to the other side. */ | |
3919 | if (flag_wrapv && !TYPE_UNSIGNED (arg0_type)) | |
3920 | break; | |
3921 | ||
12ec0a8a | 3922 | /* If EXP is signed, any overflow in the computation is undefined, |
3923 | so we don't worry about it so long as our computations on | |
3924 | the bounds don't overflow. For unsigned, overflow is defined | |
3925 | and this is exactly the right thing. */ | |
3926 | n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR, | |
7206da1b | 3927 | arg0_type, low, 0, arg1, 0); |
12ec0a8a | 3928 | n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR, |
7206da1b | 3929 | arg0_type, high, 1, arg1, 0); |
12ec0a8a | 3930 | if ((n_low != 0 && TREE_OVERFLOW (n_low)) |
3931 | || (n_high != 0 && TREE_OVERFLOW (n_high))) | |
3932 | break; | |
3933 | ||
6b457c77 | 3934 | /* Check for an unsigned range which has wrapped around the maximum |
3935 | value thus making n_high < n_low, and normalize it. */ | |
98db800f | 3936 | if (n_low && n_high && tree_int_cst_lt (n_high, n_low)) |
6b457c77 | 3937 | { |
7206da1b | 3938 | low = range_binop (PLUS_EXPR, arg0_type, n_high, 0, |
a9e29e86 | 3939 | integer_one_node, 0); |
7206da1b | 3940 | high = range_binop (MINUS_EXPR, arg0_type, n_low, 0, |
a80d786b | 3941 | integer_one_node, 0); |
3942 | ||
3943 | /* If the range is of the form +/- [ x+1, x ], we won't | |
3944 | be able to normalize it. But then, it represents the | |
3945 | whole range or the empty set, so make it | |
3946 | +/- [ -, - ]. */ | |
3947 | if (tree_int_cst_equal (n_low, low) | |
3948 | && tree_int_cst_equal (n_high, high)) | |
3949 | low = high = 0; | |
3950 | else | |
3951 | in_p = ! in_p; | |
6b457c77 | 3952 | } |
98db800f | 3953 | else |
3954 | low = n_low, high = n_high; | |
7560c8de | 3955 | |
12ec0a8a | 3956 | exp = arg0; |
3957 | continue; | |
3958 | ||
3959 | case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR: | |
7206da1b | 3960 | if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type)) |
d6d65bd2 | 3961 | break; |
3962 | ||
7206da1b | 3963 | if (! INTEGRAL_TYPE_P (arg0_type) |
3964 | || (low != 0 && ! int_fits_type_p (low, arg0_type)) | |
3965 | || (high != 0 && ! int_fits_type_p (high, arg0_type))) | |
12ec0a8a | 3966 | break; |
3967 | ||
4cd44a59 | 3968 | n_low = low, n_high = high; |
12ec0a8a | 3969 | |
4cd44a59 | 3970 | if (n_low != 0) |
7206da1b | 3971 | n_low = fold_convert (arg0_type, n_low); |
4cd44a59 | 3972 | |
3973 | if (n_high != 0) | |
7206da1b | 3974 | n_high = fold_convert (arg0_type, n_high); |
4cd44a59 | 3975 | |
4cd44a59 | 3976 | |
7206da1b | 3977 | /* If we're converting arg0 from an unsigned type, to exp, |
2c763ed4 | 3978 | a signed type, we will be doing the comparison as unsigned. |
7206da1b | 3979 | The tests above have already verified that LOW and HIGH |
3980 | are both positive. | |
3981 | ||
3982 | So we have to ensure that we will handle large unsigned | |
3983 | values the same way that the current signed bounds treat | |
3984 | negative values. */ | |
3985 | ||
3986 | if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type)) | |
4cd44a59 | 3987 | { |
f52483b5 | 3988 | tree high_positive; |
7206da1b | 3989 | tree equiv_type = lang_hooks.types.type_for_mode |
3990 | (TYPE_MODE (arg0_type), 1); | |
f52483b5 | 3991 | |
3992 | /* A range without an upper bound is, naturally, unbounded. | |
3993 | Since convert would have cropped a very large value, use | |
155b05dc | 3994 | the max value for the destination type. */ |
3995 | high_positive | |
3996 | = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type) | |
7206da1b | 3997 | : TYPE_MAX_VALUE (arg0_type); |
f52483b5 | 3998 | |
7206da1b | 3999 | if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type)) |
7ab7fd4f | 4000 | high_positive = fold_build2 (RSHIFT_EXPR, arg0_type, |
4001 | fold_convert (arg0_type, | |
4002 | high_positive), | |
4003 | fold_convert (arg0_type, | |
4004 | integer_one_node)); | |
cc049fa3 | 4005 | |
4cd44a59 | 4006 | /* If the low bound is specified, "and" the range with the |
4007 | range for which the original unsigned value will be | |
4008 | positive. */ | |
4009 | if (low != 0) | |
4010 | { | |
4011 | if (! merge_ranges (&n_in_p, &n_low, &n_high, | |
b30e3dbc | 4012 | 1, n_low, n_high, 1, |
5c9198bd | 4013 | fold_convert (arg0_type, |
4014 | integer_zero_node), | |
4cd44a59 | 4015 | high_positive)) |
4016 | break; | |
4017 | ||
4018 | in_p = (n_in_p == in_p); | |
4019 | } | |
4020 | else | |
4021 | { | |
4022 | /* Otherwise, "or" the range with the range of the input | |
4023 | that will be interpreted as negative. */ | |
4024 | if (! merge_ranges (&n_in_p, &n_low, &n_high, | |
b30e3dbc | 4025 | 0, n_low, n_high, 1, |
5c9198bd | 4026 | fold_convert (arg0_type, |
4027 | integer_zero_node), | |
4cd44a59 | 4028 | high_positive)) |
4029 | break; | |
4030 | ||
4031 | in_p = (in_p != n_in_p); | |
4032 | } | |
4033 | } | |
12ec0a8a | 4034 | |
4035 | exp = arg0; | |
4cd44a59 | 4036 | low = n_low, high = n_high; |
12ec0a8a | 4037 | continue; |
4cd44a59 | 4038 | |
4039 | default: | |
4040 | break; | |
6f725368 | 4041 | } |
12ec0a8a | 4042 | |
4043 | break; | |
6f725368 | 4044 | } |
12ec0a8a | 4045 | |
f83854c8 | 4046 | /* If EXP is a constant, we can evaluate whether this is true or false. */ |
4047 | if (TREE_CODE (exp) == INTEGER_CST) | |
4048 | { | |
4049 | in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node, | |
4050 | exp, 0, low, 0)) | |
4051 | && integer_onep (range_binop (LE_EXPR, integer_type_node, | |
4052 | exp, 1, high, 1))); | |
4053 | low = high = 0; | |
4054 | exp = 0; | |
4055 | } | |
4056 | ||
12ec0a8a | 4057 | *pin_p = in_p, *plow = low, *phigh = high; |
4058 | return exp; | |
4059 | } | |
4060 | \f | |
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.

   TYPE is the (boolean-like) type of the resulting test; LOW and HIGH
   are the range bounds, where a null bound means unbounded on that
   side.  The function recurses on itself to reduce the general case to
   simpler ones (an inverted test, a zero-based range, etc.).  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  /* An "out of range" test is built as the inversion of the
     corresponding "in range" test.  */
  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue (value);

      return 0;
    }

  /* Unbounded on both sides: always true.  */
  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  /* Only an upper bound: EXP <= HIGH.  */
  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
			fold_convert (etype, high));

  /* Only a lower bound: EXP >= LOW.  */
  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
			fold_convert (etype, low));

  /* Degenerate single-value range: EXP == LOW.  */
  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
			fold_convert (etype, low));

  /* A zero-based range [0, HIGH] needs only the upper-bound test, done
     in the unsigned flavor of the type so that negative values of a
     signed EXP also fail the check.  */
  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = lang_hooks.types.unsigned_type (etype);
	  high = fold_convert (etype, high);
	  exp = fold_convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      /* Compute the two-word representation of the maximum value of the
	 signed type with ETYPE's precision, to compare against HIGH.  */
      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  /* [1, signed-max] is exactly "signed EXP > 0".  */
	  if (TYPE_UNSIGNED (etype))
	    {
	      etype = lang_hooks.types.signed_type (etype);
	      exp = fold_convert (etype, exp);
	    }
	  return fold_build2 (GT_EXPR, type, exp,
			      build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.  */
  switch (TREE_CODE (etype))
    {
    case INTEGER_TYPE:
      /* There is no requirement that LOW be within the range of ETYPE
	 if the latter is a subtype.  It must, however, be within the base
	 type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (TREE_TYPE (etype))
	etype = TREE_TYPE (etype);
      break;

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Do the arithmetic in an integer type of the same width.  */
      etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					      TYPE_UNSIGNED (etype));
      break;

    default:
      break;
    }

  /* If we don't have wrap-around arithmetics upfront, try to force it.  */
  if (TREE_CODE (etype) == INTEGER_TYPE
      && !TYPE_UNSIGNED (etype) && !flag_wrapv)
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = lang_hooks.types.unsigned_type (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	/* The unsigned counterpart doesn't wrap the way we need;
	   give up rather than build a wrong check.  */
	return 0;
    }

  high = fold_convert (etype, high);
  low = fold_convert (etype, low);
  exp = fold_convert (etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  /* Recurse to build the zero-based check on EXP - LOW, unless the
     width HIGH - LOW itself overflowed.  */
  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold_build2 (MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
4202 | \f | |
1557b0a0 | 4203 | /* Return the predecessor of VAL in its type, handling the infinite case. */ |
4204 | ||
4205 | static tree | |
4206 | range_predecessor (tree val) | |
4207 | { | |
4208 | tree type = TREE_TYPE (val); | |
4209 | ||
20efd591 | 4210 | if (INTEGRAL_TYPE_P (type) |
4211 | && operand_equal_p (val, TYPE_MIN_VALUE (type), 0)) | |
1557b0a0 | 4212 | return 0; |
4213 | else | |
4214 | return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0); | |
4215 | } | |
4216 | ||
4217 | /* Return the successor of VAL in its type, handling the infinite case. */ | |
4218 | ||
4219 | static tree | |
4220 | range_successor (tree val) | |
4221 | { | |
4222 | tree type = TREE_TYPE (val); | |
4223 | ||
20efd591 | 4224 | if (INTEGRAL_TYPE_P (type) |
4225 | && operand_equal_p (val, TYPE_MAX_VALUE (type), 0)) | |
1557b0a0 | 4226 | return 0; |
4227 | else | |
4228 | return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0); | |
4229 | } | |
4230 | ||
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.

   Range i is described by IN?_P (nonzero for "value is inside the
   range", zero for "outside"), LOW? and HIGH?, where a null bound means
   unbounded on that side.  On success the merged range is stored
   through *PIN_P, *PLOW and *PHIGH in the same representation.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* LOWEQUAL / HIGHEQUAL: whether the two ranges share a lower /
     upper bound (treating two null bounds as equal).  */
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  /* range_successor returns 0 when HIGH1 is the type's maximum;
	     an in-range result starting past it would be empty, hence
	     the IN_P adjustment below.  */
	  low = range_successor (high1);
	  high = high0;
	  in_p = (low != 0);
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = (high != 0);
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = (low != 0);
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    /* Only treat an enum bound as the type minimum when
		       the enum occupies its full mode, so no values lie
		       below it.  */
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    /* An unsigned pointer bound whose successor wraps to
		       zero is the maximum pointer value.  */
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
	         minimum values of the given type.  For
	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
	         return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
0023616d | 4425 | \f |
4426 | ||
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   TYPE is the type of the whole COND_EXPR; ARG0 is the comparison,
   ARG1 the "then" operand and ARG2 the "else" operand.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  /* Strip conversion wrappers so the structural comparisons below see
     through NOP_EXPRs.  */
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
	        have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert (arg1_type, arg1);
	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	/* The unordered flavors may raise no exception, so don't fold
	   them into ABS_EXPR when traps on NaN comparisons matter.  */
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	/* ABS_EXPR wants a signed operand.  */
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert (type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  The TYPE_MAX_VALUE
	   guard keeps the C2 + 1 computation from wrapping.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;
      case NE_EXPR:
	break;
      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
4699 | ||
4700 | ||
12ec0a8a | 4701 | \f |
17529f98 | 4702 | #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT |
4703 | #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2) | |
cf451ad8 | 4704 | #endif |
4705 | ||
12ec0a8a | 4706 | /* EXP is some logical combination of boolean tests. See if we can |
4707 | merge it into some range test. Return the new tree if so. */ | |
6f725368 | 4708 | |
static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  /* True for the OR forms; range logic below works in "AND" terms, so
     OR is handled by inverting both sides here and the result at the end
     (De Morgan).  */
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  /* Decompose each operand into "EXPR is (not) in [low, high]" form.
     A null result means the operand is not a range test.  */
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  /* NOTE: lhs/rhs are deliberately reassigned here to the rebuilt
	     range checks over the shared SAVE_EXPR.  */
	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build2 (code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, lhs, rhs);
	}
    }

  /* No merge was possible.  */
  return 0;
}
4774 | \f | |
94f29e88 | 4775 | /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P |
b2dcfbf7 | 4776 | bit value. Arrange things so the extra bits will be set to zero if and |
   only if C is sign-extended to its full width.  If MASK is nonzero,
4778 | it is an INTEGER_CST that should be AND'ed with the extra bits. */ | |
94f29e88 | 4779 | |
static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  /* A full-width or unsigned value needs no adjustment.  */
  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  /* Move the (possible) sign bit to the top, then arithmetic-shift it
     down so it fills bits P..modesize-1.  */
  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  /* XOR with C flips exactly the extension bits computed above.  */
  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
4816 | \f | |
79109eec | 4817 | /* Find ways of folding logical expressions of LHS and RHS: |
4818 | Try to merge two comparisons to the same innermost item. | |
4819 | Look for range tests like "ch >= '0' && ch <= '9'". | |
4820 | Look for combinations of simple terms on machines with expensive branches | |
4821 | and evaluate the RHS unconditionally. | |
2bc77e10 | 4822 | |
4823 | For example, if we have p->a == 2 && p->b == 4 and we can make an | |
4824 | object large enough to span both A and B, we can do this with a comparison | |
4825 | against the object ANDed with the a mask. | |
4826 | ||
4827 | If we have p->a == q->a && p->b == q->b, we may be able to use bit masking | |
4828 | operations to do this with one comparison. | |
4829 | ||
4830 | We check for both normal comparisons and the BIT_AND_EXPRs made this by | |
4831 | function and the one above. | |
4832 | ||
4833 | CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR, | |
4834 | TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR. | |
4835 | ||
4836 | TRUTH_TYPE is the type of the logical operand and LHS and RHS are its | |
4837 | two operands. | |
4838 | ||
4839 | We return the simplified tree or 0 if no optimization is possible. */ | |
4840 | ||
static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;
  /* Remember the incoming operands so we can tell whether anything was
     actually changed before rebuilding the expression near the end of the
     BRANCH_COST block below.  */
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      tree result;
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
	{
          result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  /* Same operands but swapped on the right-hand comparison;
	     canonicalize by swapping RCODE.  */
          result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  /* From here on, distinguish only AND vs. OR; the short-circuit
     variants behave identically for the merges below.  */
  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	{
	  /* Only rebuild if something changed, to avoid returning a tree
	     identical to the input (which would make fold loop).  */
	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
	    return build2 (code, truth_type, lhs, rhs);
	  return NULL_TREE;
	}
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  /* Decode each comparison operand as a reference to a bit-field:
     inner object, bit size/position, mode, signedness and masks.
     VOLATILEP accumulates across all four decodes.  */
  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  /* Align the field start down to a multiple of the mode size
     (lnbitsize is a power of two here).  */
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      /* If the constant has bits outside the field's mask, the comparison
	 result is known at compile time.  */
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, ll_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, rl_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
5255 | \f | |
cc049fa3 | 5256 | /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a |
155b05dc | 5257 | constant. */ |
5258 | ||
5259 | static tree | |
155acab4 | 5260 | optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1) |
155b05dc | 5261 | { |
155acab4 | 5262 | tree arg0 = op0; |
155b05dc | 5263 | enum tree_code op_code; |
155acab4 | 5264 | tree comp_const = op1; |
155b05dc | 5265 | tree minmax_const; |
5266 | int consts_equal, consts_lt; | |
5267 | tree inner; | |
5268 | ||
5269 | STRIP_SIGN_NOPS (arg0); | |
5270 | ||
5271 | op_code = TREE_CODE (arg0); | |
5272 | minmax_const = TREE_OPERAND (arg0, 1); | |
5273 | consts_equal = tree_int_cst_equal (minmax_const, comp_const); | |
5274 | consts_lt = tree_int_cst_lt (minmax_const, comp_const); | |
5275 | inner = TREE_OPERAND (arg0, 0); | |
5276 | ||
5277 | /* If something does not permit us to optimize, return the original tree. */ | |
5278 | if ((op_code != MIN_EXPR && op_code != MAX_EXPR) | |
5279 | || TREE_CODE (comp_const) != INTEGER_CST | |
5280 | || TREE_CONSTANT_OVERFLOW (comp_const) | |
5281 | || TREE_CODE (minmax_const) != INTEGER_CST | |
5282 | || TREE_CONSTANT_OVERFLOW (minmax_const)) | |
155acab4 | 5283 | return NULL_TREE; |
155b05dc | 5284 | |
5285 | /* Now handle all the various comparison codes. We only handle EQ_EXPR | |
5286 | and GT_EXPR, doing the rest with recursive calls using logical | |
5287 | simplifications. */ | |
155acab4 | 5288 | switch (code) |
155b05dc | 5289 | { |
5290 | case NE_EXPR: case LT_EXPR: case LE_EXPR: | |
155acab4 | 5291 | { |
6758b11c | 5292 | tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false), |
5293 | type, op0, op1); | |
5294 | if (tem) | |
5295 | return invert_truthvalue (tem); | |
5296 | return NULL_TREE; | |
155acab4 | 5297 | } |
155b05dc | 5298 | |
5299 | case GE_EXPR: | |
5300 | return | |
7ab7fd4f | 5301 | fold_build2 (TRUTH_ORIF_EXPR, type, |
5302 | optimize_minmax_comparison | |
5303 | (EQ_EXPR, type, arg0, comp_const), | |
5304 | optimize_minmax_comparison | |
5305 | (GT_EXPR, type, arg0, comp_const)); | |
155b05dc | 5306 | |
5307 | case EQ_EXPR: | |
5308 | if (op_code == MAX_EXPR && consts_equal) | |
5309 | /* MAX (X, 0) == 0 -> X <= 0 */ | |
7ab7fd4f | 5310 | return fold_build2 (LE_EXPR, type, inner, comp_const); |
155b05dc | 5311 | |
5312 | else if (op_code == MAX_EXPR && consts_lt) | |
5313 | /* MAX (X, 0) == 5 -> X == 5 */ | |
7ab7fd4f | 5314 | return fold_build2 (EQ_EXPR, type, inner, comp_const); |
155b05dc | 5315 | |
5316 | else if (op_code == MAX_EXPR) | |
5317 | /* MAX (X, 0) == -1 -> false */ | |
5318 | return omit_one_operand (type, integer_zero_node, inner); | |
5319 | ||
5320 | else if (consts_equal) | |
5321 | /* MIN (X, 0) == 0 -> X >= 0 */ | |
7ab7fd4f | 5322 | return fold_build2 (GE_EXPR, type, inner, comp_const); |
155b05dc | 5323 | |
5324 | else if (consts_lt) | |
5325 | /* MIN (X, 0) == 5 -> false */ | |
5326 | return omit_one_operand (type, integer_zero_node, inner); | |
5327 | ||
5328 | else | |
5329 | /* MIN (X, 0) == -1 -> X == -1 */ | |
7ab7fd4f | 5330 | return fold_build2 (EQ_EXPR, type, inner, comp_const); |
155b05dc | 5331 | |
5332 | case GT_EXPR: | |
5333 | if (op_code == MAX_EXPR && (consts_equal || consts_lt)) | |
5334 | /* MAX (X, 0) > 0 -> X > 0 | |
5335 | MAX (X, 0) > 5 -> X > 5 */ | |
7ab7fd4f | 5336 | return fold_build2 (GT_EXPR, type, inner, comp_const); |
155b05dc | 5337 | |
5338 | else if (op_code == MAX_EXPR) | |
5339 | /* MAX (X, 0) > -1 -> true */ | |
5340 | return omit_one_operand (type, integer_one_node, inner); | |
5341 | ||
5342 | else if (op_code == MIN_EXPR && (consts_equal || consts_lt)) | |
5343 | /* MIN (X, 0) > 0 -> false | |
5344 | MIN (X, 0) > 5 -> false */ | |
5345 | return omit_one_operand (type, integer_zero_node, inner); | |
5346 | ||
5347 | else | |
5348 | /* MIN (X, 0) > -1 -> X > -1 */ | |
7ab7fd4f | 5349 | return fold_build2 (GT_EXPR, type, inner, comp_const); |
155b05dc | 5350 | |
5351 | default: | |
155acab4 | 5352 | return NULL_TREE; |
155b05dc | 5353 | } |
5354 | } | |
5355 | \f | |
23ec2d5e | 5356 | /* T is an integer expression that is being multiplied, divided, or taken a |
5357 | modulus (CODE says which and what kind of divide or modulus) by a | |
5358 | constant C. See if we can eliminate that operation by folding it with | |
5359 | other operations already in T. WIDE_TYPE, if non-null, is a type that | |
5360 | should be used for the computation if wider than our type. | |
5361 | ||
b07ba9ff | 5362 | For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return |
5363 | (X * 2) + (Y * 4). We must, however, be assured that either the original | |
2f5cf552 | 5364 | expression would not overflow or that overflow is undefined for the type |
5365 | in the language in question. | |
5366 | ||
5367 | We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either | |
5368 | the machine has a multiply-accumulate insn or that this is part of an | |
5369 | addressing calculation. | |
23ec2d5e | 5370 | |
5371 | If we return a non-null expression, it is an equivalent form of the | |
5372 | original computation, but need not be in the original type. */ | |
5373 | ||
5374 | static tree | |
de1b648b | 5375 | extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type) |
009f6e1c | 5376 | { |
5377 | /* To avoid exponential search depth, refuse to allow recursion past | |
5378 | three levels. Beyond that (1) it's highly unlikely that we'll find | |
5379 | something interesting and (2) we've probably processed it before | |
5380 | when we built the inner expression. */ | |
5381 | ||
5382 | static int depth; | |
5383 | tree ret; | |
5384 | ||
5385 | if (depth > 3) | |
5386 | return NULL; | |
5387 | ||
5388 | depth++; | |
5389 | ret = extract_muldiv_1 (t, c, code, wide_type); | |
5390 | depth--; | |
5391 | ||
5392 | return ret; | |
5393 | } | |
5394 | ||
/* Worker for extract_muldiv.  Same contract as described in the comment
   before extract_muldiv; this function must only be reached through the
   depth-limited wrapper above.  Returns an equivalent expression (not
   necessarily in the original type) or NULL_TREE.  */

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  /* Compute in WIDE_TYPE only when it is strictly wider than T's type.  */
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
                                   > GET_MODE_SIZE (TYPE_MODE (type)))
                ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
         or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
        return const_binop (code, fold_convert (ctype, t),
                            fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
           || UNARY_CLASS_P (op0)
           || BINARY_CLASS_P (op0)
           || EXPRESSION_CLASS_P (op0))
          /* ... and is unsigned, and its type is smaller than ctype,
             then we cannot pass through as widening.  */
          && ((TYPE_UNSIGNED (TREE_TYPE (op0))
               && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
                     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
               && (GET_MODE_SIZE (TYPE_MODE (ctype))
                   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
              /* ... or this is a truncation (t is narrower than op0),
                 then we cannot pass through this narrowing.  */
              || (GET_MODE_SIZE (TYPE_MODE (type))
                  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
                  && (TYPE_UNSIGNED (ctype)
                      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
        break;

      /* Pass the constant down and see if we can make a simplification.  If
         we can, replace this expression with the inner simplification for
         possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
          && TREE_CODE (t2) == INTEGER_CST
          && ! TREE_CONSTANT_OVERFLOW (t2)
          && (0 != (t1 = extract_muldiv (op0, t2, code,
                                         code == MULT_EXPR
                                         ? ctype : NULL_TREE))))
        return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
         must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
        {
          tree cstype = (*lang_hooks.types.signed_type) (ctype);
          if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
            {
              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
              return fold_convert (ctype, t1);
            }
          break;
        }
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* Distribute the operation through the unary op.  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
        return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
         this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
        break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
          && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
        {
          /* Multiplying or dividing by a negative constant reverses the
             ordering, so MIN and MAX swap.  */
          if (tree_int_cst_sgn (c) < 0)
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
         or floor division, by a power of two, so we can treat it that
         way unless the multiplier or divisor overflows.  Signed
         left-shift overflow is implementation-defined rather than
         undefined in C90, so do not convert signed left shift into
         multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
          && TREE_INT_CST_HIGH (op1) == 0
          && 0 != (t1 = fold_convert (ctype,
                                      const_binop (LSHIFT_EXPR,
                                                   size_one_node,
                                                   op1, 0)))
          && ! TREE_OVERFLOW (t1))
        return extract_muldiv (build2 (tcode == LSHIFT_EXPR
                                       ? MULT_EXPR : FLOOR_DIV_EXPR,
                                       ctype, fold_convert (ctype, op0), t1),
                               c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, t2));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c), 0);
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && ! flag_wrapv))
            break;
        }
      else
        break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
         the operation since it will change the result if the original
         computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
          && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
          && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
        return fold_build2 (tcode, ctype,
                            fold_build2 (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c)),
                            op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code
          && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
                                     fold_convert (ctype, c), 0))
          && ! TREE_OVERFLOW (t1))
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with either an
         operation of CODE or TCODE.

         If we have an unsigned type that is not a sizetype, we cannot do
         this since it will change the result if the original computation
         overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
           || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
          && ! flag_wrapv
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
        {
          if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
            return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                fold_convert (ctype,
                                              const_binop (TRUNC_DIV_EXPR,
                                                           op1, c, 0)));
          else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
            return fold_build2 (code, ctype, fold_convert (ctype, op0),
                                fold_convert (ctype,
                                              const_binop (TRUNC_DIV_EXPR,
                                                           c, op1, 0)));
        }
      break;

    default:
      break;
    }

  return 0;
}
5675 | \f | |
b4af30fd | 5676 | /* Return a node which has the indicated constant VALUE (either 0 or |
5677 | 1), and is of the indicated TYPE. */ | |
5678 | ||
5c9198bd | 5679 | tree |
de1b648b | 5680 | constant_boolean_node (int value, tree type) |
b4af30fd | 5681 | { |
5682 | if (type == integer_type_node) | |
5683 | return value ? integer_one_node : integer_zero_node; | |
c4e122e7 | 5684 | else if (type == boolean_type_node) |
5685 | return value ? boolean_true_node : boolean_false_node; | |
cc049fa3 | 5686 | else |
7016c612 | 5687 | return build_int_cst (type, value); |
b4af30fd | 5688 | } |
5689 | ||
6d24c9aa | 5690 | |
5691 | /* Return true if expr looks like an ARRAY_REF and set base and | |
5692 | offset to the appropriate trees. If there is no offset, | |
6252c281 | 5693 | offset is set to NULL_TREE. Base will be canonicalized to |
5694 | something you can get the element type from using | |
148ad152 | 5695 | TREE_TYPE (TREE_TYPE (base)). Offset will be the offset |
5696 | in bytes to the base. */ | |
6d24c9aa | 5697 | |
static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
  /* One canonical form is a PLUS_EXPR with the first
     argument being an ADDR_EXPR with a possible NOP_EXPR
     attached.  */
  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      tree inner_base, dummy1;
      /* Strip NOP_EXPRs here because the C frontends and/or
         folders present us (int *)&x.a + 4B possibly.  */
      STRIP_NOPS (op0);
      if (extract_array_ref (op0, &inner_base, &dummy1))
        {
          *base = inner_base;
          /* Combine the offset found by the recursive call (if any)
             with the byte offset added at this level.  */
          if (dummy1 == NULL_TREE)
            *offset = TREE_OPERAND (expr, 1);
          else
            *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
                                   dummy1, TREE_OPERAND (expr, 1));
          return true;
        }
    }
  /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
     which we transform into an ADDR_EXPR with appropriate
     offset.  For other arguments to the ADDR_EXPR we assume
     zero offset and as such do not care about the ADDR_EXPR
     type and strip possible nops from it.  */
  else if (TREE_CODE (expr) == ADDR_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      if (TREE_CODE (op0) == ARRAY_REF)
        {
          /* The byte offset is index times element size.  */
          tree idx = TREE_OPERAND (op0, 1);
          *base = TREE_OPERAND (op0, 0);
          *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
                                 array_ref_element_size (op0));
        }
      else
        {
          /* Handle array-to-pointer decay as &a.  */
          if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
            *base = TREE_OPERAND (expr, 0);
          else
            *base = expr;
          *offset = NULL_TREE;
        }
      return true;
    }
  /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
  else if (SSA_VAR_P (expr)
           && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
    {
      *base = expr;
      *offset = NULL_TREE;
      return true;
    }

  return false;
}
5759 | ||
5760 | ||
203a24c4 | 5761 | /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'. |
47cbd05d | 5762 | Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here |
5763 | CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)' | |
6ef828f9 | 5764 | expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the |
47cbd05d | 5765 | COND is the first argument to CODE; otherwise (as in the example |
5766 | given here), it is the second argument. TYPE is the type of the | |
9c9bad97 | 5767 | original expression. Return NULL_TREE if no simplification is |
a6661800 | 5768 | possible. */ |
47cbd05d | 5769 | |
static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  /* Types to convert the conditional's arms and ARG to before building
     the new operations; which is which depends on operand order.  */
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once its pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  Keep the void arm untouched by pre-seeding
         lhs/rhs with it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else
    {
      /* COND is a bare comparison `(x < y)': treat it as a COND_EXPR
         with constant true/false arms.  */
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
        lhs = fold_build2 (code, type, true_value, arg);
      else
        lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
        rhs = fold_build2 (code, type, false_value, arg);
      else
        rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
5829 | ||
be2828ce | 5830 | \f |
920d0fb5 | 5831 | /* Subroutine of fold() that checks for the addition of +/- 0.0. |
5832 | ||
5833 | If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type | |
5834 | TYPE, X + ADDEND is the same as X. If NEGATE, return true if X - | |
5835 | ADDEND is the same as X. | |
5836 | ||
6ef828f9 | 5837 | X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero |
920d0fb5 | 5838 | and finite. The problematic cases are when X is zero, and its mode |
5839 | has signed zeros. In the case of rounding towards -infinity, | |
5840 | X - 0 is not the same as X because 0 - 0 is -0. In other rounding | |
5841 | modes, X + 0 is not the same as X because -0 + 0 is 0. */ | |
5842 | ||
5843 | static bool | |
de1b648b | 5844 | fold_real_zero_addition_p (tree type, tree addend, int negate) |
920d0fb5 | 5845 | { |
5846 | if (!real_zerop (addend)) | |
5847 | return false; | |
5848 | ||
c7590f7e | 5849 | /* Don't allow the fold with -fsignaling-nans. */ |
5850 | if (HONOR_SNANS (TYPE_MODE (type))) | |
5851 | return false; | |
5852 | ||
920d0fb5 | 5853 | /* Allow the fold if zeros aren't signed, or their sign isn't important. */ |
5854 | if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))) | |
5855 | return true; | |
5856 | ||
5857 | /* Treat x + -0 as x - 0 and x - -0 as x + 0. */ | |
5858 | if (TREE_CODE (addend) == REAL_CST | |
5859 | && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend))) | |
5860 | negate = !negate; | |
5861 | ||
5862 | /* The mode has signed zeros, and we have to honor their sign. | |
5863 | In this situation, there is only one case we can return true for. | |
5864 | X - 0 is the same as X unless rounding towards -infinity is | |
5865 | supported. */ | |
5866 | return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)); | |
5867 | } | |
5868 | ||
4b0b9adb | 5869 | /* Subroutine of fold() that checks comparisons of built-in math |
5870 | functions against real constants. | |
5871 | ||
5872 | FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison | |
5873 | operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE | |
5874 | is the type of the result and ARG0 and ARG1 are the operands of the | |
5875 | comparison. ARG1 must be a TREE_REAL_CST. | |
5876 | ||
5877 | The function returns the constant folded tree if a simplification | |
5878 | can be made, and NULL_TREE otherwise. */ | |
5879 | ||
static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      /* ARG0 is a call to sqrt; ARG is its (sole) argument.  */
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) == y and sqrt(x) < y are always false, if y is
             negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand (type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand (type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold_build2 (GE_EXPR, type, arg,
                              build_real (TREE_TYPE (arg), dconst0));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          /* c2 = c*c, rounded in ARG0's mode.  */
          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold_build2 (EQ_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand (type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold_build2 (code, type, arg,
                              build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          /* c2 = c*c, rounded in ARG0's mode.  */
          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand (type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2 (NE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2 (GE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.
                 This needs a SAVE_EXPR, which is unsafe at global
                 scope or inside a PLACEHOLDER_EXPR context.  */
              if (lang_hooks.decls.global_bindings_p () != 0
                  || CONTAINS_PLACEHOLDER_P (arg))
                return NULL_TREE;

              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (NE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2 (code, type, arg,
                                build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          if (lang_hooks.decls.global_bindings_p () == 0
              && ! CONTAINS_PLACEHOLDER_P (arg))
            {
              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (code, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }
        }
    }

  return NULL_TREE;
}
5994 | ||
6d2e901f | 5995 | /* Subroutine of fold() that optimizes comparisons against Infinities, |
5996 | either +Inf or -Inf. | |
5997 | ||
5998 | CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, | |
5999 | GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1 | |
6000 | are the operands of the comparison. ARG1 must be a TREE_REAL_CST. | |
6001 | ||
6002 | The function returns the constant folded tree if a simplification | |
6003 | can be made, and NULL_TREE otherwise. */ | |
6004 | ||
static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).
         Needs a SAVE_EXPR, so bail out at global scope or inside
         a PLACEHOLDER_EXPR context.  */
      if (lang_hooks.decls.global_bindings_p () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold_build2 (EQ_EXPR, type, arg0, arg0);
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                            arg0, build_real (TREE_TYPE (arg0), max));

      /* The transformation below creates non-gimple code and thus is
         not appropriate if we are in gimple form.  */
      if (in_gimple_form)
        return NULL_TREE;

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
920d0fb5 | 6077 | |
270029e0 | 6078 | /* Subroutine of fold() that optimizes comparisons of a division by |
365db11e | 6079 | a nonzero integer constant against an integer constant, i.e. |
270029e0 | 6080 | X/C1 op C2. |
6081 | ||
6082 | CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, | |
6083 | GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1 | |
6084 | are the operands of the comparison. ARG1 must be a TREE_REAL_CST. | |
6085 | ||
6086 | The function returns the constant folded tree if a simplification | |
6087 | can be made, and NULL_TREE otherwise. */ | |
6088 | ||
static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
				   TREE_INT_CST_HIGH (arg01),
				   TREE_INT_CST_LOW (arg1),
				   TREE_INT_CST_HIGH (arg1),
				   &lpart, &hpart, unsigned_p);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  /* Record any multiplication overflow on PROD so the range selection
     below can tell when a bound is not representable in the type.  */
  prod = force_fit_type (prod, -1, overflow, false);
  neg_overflow = false;

  if (unsigned_p)
    {
      /* Unsigned: X/C1 == C2 iff C1*C2 <= X <= C1*C2 + (C1-1),
	 i.e. LO = PROD and HI = PROD + (C1-1).  */
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
				       TREE_INT_CST_HIGH (prod),
				       TREE_INT_CST_LOW (tmp),
				       TREE_INT_CST_HIGH (tmp),
				       &lpart, &hpart, unsigned_p);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      /* HI inherits PROD's overflow in addition to the addition's.  */
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
			   TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      /* Signed division by a nonnegative C1: the interval of X with
	 X/C1 == C2 depends on the sign of C2, because signed division
	 truncates toward zero.  */
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  /* C2 < 0: [PROD - (C1-1), PROD]; LO may underflow.  */
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case 0:
	  /* C2 == 0: [-(C1-1), C1-1].  */
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  /* C2 > 0: [PROD, PROD + (C1-1)]; HI may overflow.  */
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  /* C2 > 0 with C1 < 0: LO may wrap below the minimum.  */
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Emit a range check X in [LO, HI] (or its complement), degrading to
     a single comparison or a constant when a bound overflowed.  */
  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  /* The comparison degenerates to a constant; NEG_OVERFLOW
	     records which direction the bound wrapped.  */
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
6239 | ||
6240 | ||
6881f973 | 6241 | /* If CODE with arguments ARG0 and ARG1 represents a single bit |
149f0db4 | 6242 | equality/inequality test, then return a simplified form of the test |
6243 | using a sign test. Otherwise return NULL. TYPE is the desired | |
6244 | result type. */ | |
7206da1b | 6245 | |
149f0db4 | 6246 | static tree |
6247 | fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1, | |
6248 | tree result_type) | |
6881f973 | 6249 | { |
6881f973 | 6250 | /* If this is testing a single bit, we can optimize the test. */ |
6251 | if ((code == NE_EXPR || code == EQ_EXPR) | |
6252 | && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) | |
6253 | && integer_pow2p (TREE_OPERAND (arg0, 1))) | |
6254 | { | |
6881f973 | 6255 | /* If we have (A & C) != 0 where C is the sign bit of A, convert |
6256 | this into A < 0. Similarly for (A & C) == 0 into A >= 0. */ | |
149f0db4 | 6257 | tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)); |
6258 | ||
7cc00cbd | 6259 | if (arg00 != NULL_TREE |
6260 | /* This is only a win if casting to a signed type is cheap, | |
6261 | i.e. when arg00's type is not a partial mode. */ | |
6262 | && TYPE_PRECISION (TREE_TYPE (arg00)) | |
6263 | == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00)))) | |
6881f973 | 6264 | { |
fa8b888f | 6265 | tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00)); |
7ab7fd4f | 6266 | return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, |
6267 | result_type, fold_convert (stype, arg00), | |
3c6185f1 | 6268 | build_int_cst (stype, 0)); |
6881f973 | 6269 | } |
149f0db4 | 6270 | } |
6271 | ||
6272 | return NULL_TREE; | |
6273 | } | |
6274 | ||
6275 | /* If CODE with arguments ARG0 and ARG1 represents a single bit | |
6276 | equality/inequality test, then return a simplified form of | |
6277 | the test using shifts and logical operations. Otherwise return | |
6278 | NULL. TYPE is the desired result type. */ | |
6279 | ||
tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      /* Bit position being tested: C = 1 << BITNUM.  */
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  /* (A >> S) & (1 << B) tests the same bit as A & (1 << (B+S)).  */
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      /* Bring the tested bit down to position 0.  */
      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      /* For EQ, invert the bit so that the final AND computes the
	 negated test.  */
      if (code == EQ_EXPR)
	inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
			     inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
		      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
fc3df357 | 6355 | |
bd214d13 | 6356 | /* Check whether we are allowed to reorder operands arg0 and arg1, |
6357 | such that the evaluation of arg1 occurs before arg0. */ | |
6358 | ||
6359 | static bool | |
6360 | reorder_operands_p (tree arg0, tree arg1) | |
6361 | { | |
6362 | if (! flag_evaluation_order) | |
0c5713a2 | 6363 | return true; |
bd214d13 | 6364 | if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1)) |
6365 | return true; | |
6366 | return ! TREE_SIDE_EFFECTS (arg0) | |
6367 | && ! TREE_SIDE_EFFECTS (arg1); | |
6368 | } | |
6369 | ||
88e11d8f | 6370 | /* Test whether it is preferable to swap two operands, ARG0 and | |
6371 | ARG1, for example because ARG0 is an integer constant and ARG1 | |
bd214d13 | 6372 | isn't. If REORDER is true, only recommend swapping if we can |
6373 | evaluate the operands in reverse order. */ | |
88e11d8f | 6374 | |
cc0bdf91 | 6375 | bool |
bd214d13 | 6376 | tree_swap_operands_p (tree arg0, tree arg1, bool reorder) |
88e11d8f | 6377 | { |
6378 | STRIP_SIGN_NOPS (arg0); | |
6379 | STRIP_SIGN_NOPS (arg1); | |
6380 | ||
6381 | if (TREE_CODE (arg1) == INTEGER_CST) | |
6382 | return 0; | |
6383 | if (TREE_CODE (arg0) == INTEGER_CST) | |
6384 | return 1; | |
6385 | ||
6386 | if (TREE_CODE (arg1) == REAL_CST) | |
6387 | return 0; | |
6388 | if (TREE_CODE (arg0) == REAL_CST) | |
6389 | return 1; | |
6390 | ||
6391 | if (TREE_CODE (arg1) == COMPLEX_CST) | |
6392 | return 0; | |
6393 | if (TREE_CODE (arg0) == COMPLEX_CST) | |
6394 | return 1; | |
6395 | ||
6396 | if (TREE_CONSTANT (arg1)) | |
6397 | return 0; | |
6398 | if (TREE_CONSTANT (arg0)) | |
6399 | return 1; | |
7206da1b | 6400 | |
f9464d30 | 6401 | if (optimize_size) |
6402 | return 0; | |
88e11d8f | 6403 | |
bd214d13 | 6404 | if (reorder && flag_evaluation_order |
6405 | && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))) | |
6406 | return 0; | |
6407 | ||
6408 | if (DECL_P (arg1)) | |
6409 | return 0; | |
6410 | if (DECL_P (arg0)) | |
6411 | return 1; | |
6412 | ||
cc0bdf91 | 6413 | /* It is preferable to swap two SSA_NAME to ensure a canonical form |
6414 | for commutative and comparison operators. Ensuring a canonical | |
6415 | form allows the optimizers to find additional redundancies without | |
6416 | having to explicitly check for both orderings. */ | |
6417 | if (TREE_CODE (arg0) == SSA_NAME | |
6418 | && TREE_CODE (arg1) == SSA_NAME | |
6419 | && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1)) | |
6420 | return 1; | |
6421 | ||
88e11d8f | 6422 | return 0; |
6423 | } | |
6424 | ||
faab57e3 | 6425 | /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where |
6426 | ARG0 is extended to a wider type. */ | |
6427 | ||
6428 | static tree | |
6429 | fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1) | |
6430 | { | |
6431 | tree arg0_unw = get_unwidened (arg0, NULL_TREE); | |
6432 | tree arg1_unw; | |
6433 | tree shorter_type, outer_type; | |
6434 | tree min, max; | |
6435 | bool above, below; | |
6436 | ||
6437 | if (arg0_unw == arg0) | |
6438 | return NULL_TREE; | |
6439 | shorter_type = TREE_TYPE (arg0_unw); | |
fd66f095 | 6440 | |
085bb6ea | 6441 | #ifdef HAVE_canonicalize_funcptr_for_compare |
6442 | /* Disable this optimization if we're casting a function pointer | |
6443 | type on targets that require function pointer canonicalization. */ | |
6444 | if (HAVE_canonicalize_funcptr_for_compare | |
6445 | && TREE_CODE (shorter_type) == POINTER_TYPE | |
6446 | && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE) | |
6447 | return NULL_TREE; | |
6448 | #endif | |
6449 | ||
fd66f095 | 6450 | if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type)) |
6451 | return NULL_TREE; | |
6452 | ||
faab57e3 | 6453 | arg1_unw = get_unwidened (arg1, shorter_type); |
faab57e3 | 6454 | |
6455 | /* If possible, express the comparison in the shorter mode. */ | |
6456 | if ((code == EQ_EXPR || code == NE_EXPR | |
6457 | || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type)) | |
6458 | && (TREE_TYPE (arg1_unw) == shorter_type | |
6459 | || (TREE_CODE (arg1_unw) == INTEGER_CST | |
66787d4f | 6460 | && (TREE_CODE (shorter_type) == INTEGER_TYPE |
6461 | || TREE_CODE (shorter_type) == BOOLEAN_TYPE) | |
faab57e3 | 6462 | && int_fits_type_p (arg1_unw, shorter_type)))) |
7ab7fd4f | 6463 | return fold_build2 (code, type, arg0_unw, |
6464 | fold_convert (shorter_type, arg1_unw)); | |
faab57e3 | 6465 | |
a5543a83 | 6466 | if (TREE_CODE (arg1_unw) != INTEGER_CST |
6467 | || TREE_CODE (shorter_type) != INTEGER_TYPE | |
6468 | || !int_fits_type_p (arg1_unw, shorter_type)) | |
faab57e3 | 6469 | return NULL_TREE; |
6470 | ||
6471 | /* If we are comparing with the integer that does not fit into the range | |
6472 | of the shorter type, the result is known. */ | |
6473 | outer_type = TREE_TYPE (arg1_unw); | |
6474 | min = lower_bound_in_type (outer_type, shorter_type); | |
6475 | max = upper_bound_in_type (outer_type, shorter_type); | |
6476 | ||
6477 | above = integer_nonzerop (fold_relational_const (LT_EXPR, type, | |
6478 | max, arg1_unw)); | |
6479 | below = integer_nonzerop (fold_relational_const (LT_EXPR, type, | |
6480 | arg1_unw, min)); | |
6481 | ||
6482 | switch (code) | |
6483 | { | |
6484 | case EQ_EXPR: | |
6485 | if (above || below) | |
20a8bb20 | 6486 | return omit_one_operand (type, integer_zero_node, arg0); |
faab57e3 | 6487 | break; |
6488 | ||
6489 | case NE_EXPR: | |
6490 | if (above || below) | |
20a8bb20 | 6491 | return omit_one_operand (type, integer_one_node, arg0); |
faab57e3 | 6492 | break; |
6493 | ||
6494 | case LT_EXPR: | |
6495 | case LE_EXPR: | |
6496 | if (above) | |
20a8bb20 | 6497 | return omit_one_operand (type, integer_one_node, arg0); |
faab57e3 | 6498 | else if (below) |
20a8bb20 | 6499 | return omit_one_operand (type, integer_zero_node, arg0); |
faab57e3 | 6500 | |
6501 | case GT_EXPR: | |
6502 | case GE_EXPR: | |
6503 | if (above) | |
20a8bb20 | 6504 | return omit_one_operand (type, integer_zero_node, arg0); |
faab57e3 | 6505 | else if (below) |
20a8bb20 | 6506 | return omit_one_operand (type, integer_one_node, arg0); |
faab57e3 | 6507 | |
6508 | default: | |
6509 | break; | |
6510 | } | |
6511 | ||
6512 | return NULL_TREE; | |
6513 | } | |
6514 | ||
6515 | /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for | |
6516 | ARG0 just the signedness is changed. */ | |
6517 | ||
static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  /* ARG0 must be a conversion.  */
  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  /* Only the signedness may change, not the width.  */
  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  /* ARG1 must be an integer constant, or itself a conversion from the
     same inner type.  */
  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
	    || TREE_CODE (arg1) == CONVERT_EXPR)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  /* Dropping a signedness change is only safe for equality
     comparisons; ordering comparisons depend on the sign.  */
  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Reinterpret the constant's bits in the inner type, carrying
	 over any previously recorded overflow flags.  */
      tmp = build_int_cst_wide (inner_type,
				TREE_INT_CST_LOW (arg1),
				TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
			     TREE_OVERFLOW (arg1),
			     TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
6570 | ||
dede8dcc | 6571 | /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is |
ad1f9c12 | 6572 | step of the array. Reconstructs s and delta in the case of s * delta |
6573 | being an integer constant (and thus already folded). | |
6574 | ADDR is the address. MULT is the multiplicative expression. | |
e709f9ea | 6575 | If the function succeeds, the new address expression is returned. Otherwise |
6576 | NULL_TREE is returned. */ | |
dede8dcc | 6577 | |
static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      /* A bare constant offset: S stays unknown; below we instead test
	 whether DELTA is a multiple of the element size.  */
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Walk down through component references looking for an ARRAY_REF
     whose element size matches S, or that DELTA divides evenly.  */
  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  /* ITYPE is the index (domain) type of the array; the new
	     index expression is built in this type.  */
	  itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! itype)
	    continue;

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  break;
	}

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  /* Shallow-copy the chain of references down to REF so the original
     tree is left untouched.  */
  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  /* Replace the index: idx CODE delta, computed in the index type.  */
  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
				       fold_convert (itype,
						     TREE_OPERAND (pos, 1)),
				       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
6674 | ||
9a73db25 | 6675 | |
6676 | /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y | |
6677 | means A >= Y && A != MAX, but in this case we know that | |
6678 | A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */ | |
6679 | ||
6680 | static tree | |
6681 | fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound) | |
6682 | { | |
6683 | tree a, typea, type = TREE_TYPE (ineq), a1, diff, y; | |
6684 | ||
6685 | if (TREE_CODE (bound) == LT_EXPR) | |
6686 | a = TREE_OPERAND (bound, 0); | |
6687 | else if (TREE_CODE (bound) == GT_EXPR) | |
6688 | a = TREE_OPERAND (bound, 1); | |
6689 | else | |
6690 | return NULL_TREE; | |
6691 | ||
6692 | typea = TREE_TYPE (a); | |
6693 | if (!INTEGRAL_TYPE_P (typea) | |
6694 | && !POINTER_TYPE_P (typea)) | |
6695 | return NULL_TREE; | |
6696 | ||
6697 | if (TREE_CODE (ineq) == LT_EXPR) | |
6698 | { | |
6699 | a1 = TREE_OPERAND (ineq, 1); | |
6700 | y = TREE_OPERAND (ineq, 0); | |
6701 | } | |
6702 | else if (TREE_CODE (ineq) == GT_EXPR) | |
6703 | { | |
6704 | a1 = TREE_OPERAND (ineq, 0); | |
6705 | y = TREE_OPERAND (ineq, 1); | |
6706 | } | |
6707 | else | |
6708 | return NULL_TREE; | |
6709 | ||
6710 | if (TREE_TYPE (a1) != typea) | |
6711 | return NULL_TREE; | |
6712 | ||
7ab7fd4f | 6713 | diff = fold_build2 (MINUS_EXPR, typea, a1, a); |
9a73db25 | 6714 | if (!integer_onep (diff)) |
6715 | return NULL_TREE; | |
6716 | ||
7ab7fd4f | 6717 | return fold_build2 (GE_EXPR, type, a, y); |
9a73db25 | 6718 | } |
6719 | ||
1c9af531 | 6720 | /* Fold a sum or difference of at least one multiplication. |
6721 | Returns the folded tree or NULL if no simplification could be made. */ | |
6722 | ||
static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else
    {
      /* View a plain operand as operand * 1.  */
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else
    {
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  /* Look for a multiplicand common to both multiplications.  */
  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
	  < (int11 >= 0 ? int11 : -int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      /* NOTE(review): abs () takes int while INT11 is HOST_WIDE_INT;
	 presumably factors wider than int are truncated here before
	 the power-of-two test -- confirm against callers' ranges.  */
      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
	{
	  /* Fold the larger factor's quotient into ARG00, keeping the
	     common power-of-two factor in MAYBE_SAME.  */
	  alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
			      build_int_cst (TREE_TYPE (arg00),
					     int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
			fold_build2 (code, type,
				     fold_convert (type, alt0),
				     fold_convert (type, alt1)),
			fold_convert (type, same));

  return NULL_TREE;
}
6811 | ||
5f4092ed | 6812 | /* Subroutine of native_encode_expr. Encode the INTEGER_CST |
6813 | specified by EXPR into the buffer PTR of length LEN bytes. | |
6814 | Return the number of bytes placed in the buffer, or zero | |
6815 | upon failure. */ | |
6816 | ||
static int
native_encode_int (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  /* Fail if the value does not fit in the caller's buffer.  */
  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      /* Pull byte BYTE out of the two-HOST_WIDE_INT representation of
	 the constant (low word first, then high word).  */
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  /* Multi-word value: place the byte according to the target's
	     word endianness and then its byte endianness.  */
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
6855 | ||
6856 | ||
6857 | /* Subroutine of native_encode_expr. Encode the REAL_CST | |
6858 | specified by EXPR into the buffer PTR of length LEN bytes. | |
6859 | Return the number of bytes placed in the buffer, or zero | |
6860 | upon failure. */ | |
6861 | ||
static int
native_encode_real (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  /* Fail if the value does not fit in the caller's buffer.  */
  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  /* Obtain the target representation as consecutive 32-bit chunks.  */
  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (byte = 0; byte < total_bytes; byte++)
    {
      /* Extract byte BYTE from the 32-bit chunk that holds it.  */
      int bitpos = byte * BITS_PER_UNIT;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (total_bytes > UNITS_PER_WORD)
	{
	  /* Multi-word value: note floats use FLOAT_WORDS_BIG_ENDIAN
	     for the word order, unlike integers.  */
	  word = byte / UNITS_PER_WORD;
	  if (FLOAT_WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
6903 | ||
6904 | /* Subroutine of native_encode_expr. Encode the COMPLEX_CST | |
6905 | specified by EXPR into the buffer PTR of length LEN bytes. | |
6906 | Return the number of bytes placed in the buffer, or zero | |
6907 | upon failure. */ | |
6908 | ||
6909 | static int | |
6910 | native_encode_complex (tree expr, unsigned char *ptr, int len) | |
6911 | { | |
6912 | int rsize, isize; | |
6913 | tree part; | |
6914 | ||
6915 | part = TREE_REALPART (expr); | |
6916 | rsize = native_encode_expr (part, ptr, len); | |
6917 | if (rsize == 0) | |
6918 | return 0; | |
6919 | part = TREE_IMAGPART (expr); | |
6920 | isize = native_encode_expr (part, ptr+rsize, len-rsize); | |
6921 | if (isize != rsize) | |
6922 | return 0; | |
6923 | return rsize + isize; | |
6924 | } | |
6925 | ||
6926 | ||
6927 | /* Subroutine of native_encode_expr. Encode the VECTOR_CST | |
6928 | specified by EXPR into the buffer PTR of length LEN bytes. | |
6929 | Return the number of bytes placed in the buffer, or zero | |
6930 | upon failure. */ | |
6931 | ||
6932 | static int | |
6933 | native_encode_vector (tree expr, unsigned char *ptr, int len) | |
6934 | { | |
3fa15ed1 | 6935 | int i, size, offset, count; |
9fd22806 | 6936 | tree itype, elem, elements; |
5f4092ed | 6937 | |
5f4092ed | 6938 | offset = 0; |
6939 | elements = TREE_VECTOR_CST_ELTS (expr); | |
6940 | count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)); | |
9fd22806 | 6941 | itype = TREE_TYPE (TREE_TYPE (expr)); |
6942 | size = GET_MODE_SIZE (TYPE_MODE (itype)); | |
5f4092ed | 6943 | for (i = 0; i < count; i++) |
6944 | { | |
6945 | if (elements) | |
6946 | { | |
6947 | elem = TREE_VALUE (elements); | |
6948 | elements = TREE_CHAIN (elements); | |
6949 | } | |
6950 | else | |
6951 | elem = NULL_TREE; | |
6952 | ||
6953 | if (elem) | |
6954 | { | |
9fd22806 | 6955 | if (native_encode_expr (elem, ptr+offset, len-offset) != size) |
5f4092ed | 6956 | return 0; |
6957 | } | |
9fd22806 | 6958 | else |
5f4092ed | 6959 | { |
6960 | if (offset + size > len) | |
6961 | return 0; | |
6962 | memset (ptr+offset, 0, size); | |
6963 | } | |
5f4092ed | 6964 | offset += size; |
6965 | } | |
6966 | return offset; | |
6967 | } | |
6968 | ||
6969 | ||
6970 | /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, | |
6971 | REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the | |
6972 | buffer PTR of length LEN bytes. Return the number of bytes | |
6973 | placed in the buffer, or zero upon failure. */ | |
6974 | ||
6975 | static int | |
6976 | native_encode_expr (tree expr, unsigned char *ptr, int len) | |
6977 | { | |
6978 | switch (TREE_CODE (expr)) | |
6979 | { | |
6980 | case INTEGER_CST: | |
6981 | return native_encode_int (expr, ptr, len); | |
6982 | ||
6983 | case REAL_CST: | |
6984 | return native_encode_real (expr, ptr, len); | |
6985 | ||
6986 | case COMPLEX_CST: | |
6987 | return native_encode_complex (expr, ptr, len); | |
6988 | ||
6989 | case VECTOR_CST: | |
6990 | return native_encode_vector (expr, ptr, len); | |
6991 | ||
6992 | default: | |
6993 | return 0; | |
6994 | } | |
6995 | } | |
6996 | ||
6997 | ||
6998 | /* Subroutine of native_interpret_expr. Interpret the contents of | |
6999 | the buffer PTR of length LEN as an INTEGER_CST of type TYPE. | |
7000 | If the buffer cannot be interpreted, return NULL_TREE. */ | |
7001 | ||
7002 | static tree | |
7003 | native_interpret_int (tree type, unsigned char *ptr, int len) | |
7004 | { | |
7005 | int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); | |
7006 | int byte, offset, word, words; | |
7007 | unsigned char value; | |
7008 | unsigned int HOST_WIDE_INT lo = 0; | |
7009 | HOST_WIDE_INT hi = 0; | |
7010 | ||
7011 | if (total_bytes > len) | |
7012 | return NULL_TREE; | |
7013 | if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT) | |
7014 | return NULL_TREE; | |
7015 | words = total_bytes / UNITS_PER_WORD; | |
7016 | ||
7017 | for (byte = 0; byte < total_bytes; byte++) | |
7018 | { | |
7019 | int bitpos = byte * BITS_PER_UNIT; | |
7020 | if (total_bytes > UNITS_PER_WORD) | |
7021 | { | |
7022 | word = byte / UNITS_PER_WORD; | |
7023 | if (WORDS_BIG_ENDIAN) | |
7024 | word = (words - 1) - word; | |
7025 | offset = word * UNITS_PER_WORD; | |
7026 | if (BYTES_BIG_ENDIAN) | |
7027 | offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); | |
7028 | else | |
7029 | offset += byte % UNITS_PER_WORD; | |
7030 | } | |
7031 | else | |
7032 | offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte; | |
7033 | value = ptr[offset]; | |
7034 | ||
7035 | if (bitpos < HOST_BITS_PER_WIDE_INT) | |
7036 | lo |= (unsigned HOST_WIDE_INT) value << bitpos; | |
7037 | else | |
7038 | hi |= (unsigned HOST_WIDE_INT) value | |
7039 | << (bitpos - HOST_BITS_PER_WIDE_INT); | |
7040 | } | |
7041 | ||
7042 | return force_fit_type (build_int_cst_wide (type, lo, hi), | |
7043 | 0, false, false); | |
7044 | } | |
7045 | ||
7046 | ||
7047 | /* Subroutine of native_interpret_expr. Interpret the contents of | |
7048 | the buffer PTR of length LEN as a REAL_CST of type TYPE. | |
7049 | If the buffer cannot be interpreted, return NULL_TREE. */ | |
7050 | ||
7051 | static tree | |
7052 | native_interpret_real (tree type, unsigned char *ptr, int len) | |
7053 | { | |
3fa15ed1 | 7054 | enum machine_mode mode = TYPE_MODE (type); |
7055 | int total_bytes = GET_MODE_SIZE (mode); | |
5f4092ed | 7056 | int byte, offset, word, words; |
7057 | unsigned char value; | |
7058 | /* There are always 32 bits in each long, no matter the size of | |
7059 | the hosts long. We handle floating point representations with | |
7060 | up to 192 bits. */ | |
7061 | REAL_VALUE_TYPE r; | |
7062 | long tmp[6]; | |
7063 | ||
7064 | total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); | |
7065 | if (total_bytes > len || total_bytes > 24) | |
7066 | return NULL_TREE; | |
7067 | words = total_bytes / UNITS_PER_WORD; | |
7068 | ||
7069 | memset (tmp, 0, sizeof (tmp)); | |
7070 | for (byte = 0; byte < total_bytes; byte++) | |
7071 | { | |
7072 | int bitpos = byte * BITS_PER_UNIT; | |
7073 | if (total_bytes > UNITS_PER_WORD) | |
7074 | { | |
7075 | word = byte / UNITS_PER_WORD; | |
7076 | if (FLOAT_WORDS_BIG_ENDIAN) | |
7077 | word = (words - 1) - word; | |
7078 | offset = word * UNITS_PER_WORD; | |
7079 | if (BYTES_BIG_ENDIAN) | |
7080 | offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); | |
7081 | else | |
7082 | offset += byte % UNITS_PER_WORD; | |
7083 | } | |
7084 | else | |
7085 | offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte; | |
7086 | value = ptr[offset]; | |
7087 | ||
7088 | tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31); | |
7089 | } | |
7090 | ||
7091 | real_from_target (&r, tmp, mode); | |
7092 | return build_real (type, r); | |
7093 | } | |
7094 | ||
7095 | ||
7096 | /* Subroutine of native_interpret_expr. Interpret the contents of | |
7097 | the buffer PTR of length LEN as a COMPLEX_CST of type TYPE. | |
7098 | If the buffer cannot be interpreted, return NULL_TREE. */ | |
7099 | ||
7100 | static tree | |
7101 | native_interpret_complex (tree type, unsigned char *ptr, int len) | |
7102 | { | |
7103 | tree etype, rpart, ipart; | |
7104 | int size; | |
7105 | ||
7106 | etype = TREE_TYPE (type); | |
7107 | size = GET_MODE_SIZE (TYPE_MODE (etype)); | |
7108 | if (size * 2 > len) | |
7109 | return NULL_TREE; | |
7110 | rpart = native_interpret_expr (etype, ptr, size); | |
7111 | if (!rpart) | |
7112 | return NULL_TREE; | |
7113 | ipart = native_interpret_expr (etype, ptr+size, size); | |
7114 | if (!ipart) | |
7115 | return NULL_TREE; | |
7116 | return build_complex (type, rpart, ipart); | |
7117 | } | |
7118 | ||
7119 | ||
7120 | /* Subroutine of native_interpret_expr. Interpret the contents of | |
7121 | the buffer PTR of length LEN as a VECTOR_CST of type TYPE. | |
7122 | If the buffer cannot be interpreted, return NULL_TREE. */ | |
7123 | ||
7124 | static tree | |
7125 | native_interpret_vector (tree type, unsigned char *ptr, int len) | |
7126 | { | |
7127 | tree etype, elem, elements; | |
7128 | int i, size, count; | |
7129 | ||
7130 | etype = TREE_TYPE (type); | |
7131 | size = GET_MODE_SIZE (TYPE_MODE (etype)); | |
7132 | count = TYPE_VECTOR_SUBPARTS (type); | |
7133 | if (size * count > len) | |
7134 | return NULL_TREE; | |
7135 | ||
7136 | elements = NULL_TREE; | |
7137 | for (i = count - 1; i >= 0; i--) | |
7138 | { | |
7139 | elem = native_interpret_expr (etype, ptr+(i*size), size); | |
7140 | if (!elem) | |
7141 | return NULL_TREE; | |
7142 | elements = tree_cons (NULL_TREE, elem, elements); | |
7143 | } | |
7144 | return build_vector (type, elements); | |
7145 | } | |
7146 | ||
7147 | ||
d961ae3a | 7148 | /* Subroutine of fold_view_convert_expr. Interpret the contents of |
5f4092ed | 7149 | the buffer PTR of length LEN as a constant of type TYPE. For |
7150 | INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P | |
7151 | we return a REAL_CST, etc... If the buffer cannot be interpreted, | |
7152 | return NULL_TREE. */ | |
7153 | ||
7154 | static tree | |
7155 | native_interpret_expr (tree type, unsigned char *ptr, int len) | |
7156 | { | |
7157 | switch (TREE_CODE (type)) | |
7158 | { | |
7159 | case INTEGER_TYPE: | |
7160 | case ENUMERAL_TYPE: | |
7161 | case BOOLEAN_TYPE: | |
7162 | return native_interpret_int (type, ptr, len); | |
7163 | ||
7164 | case REAL_TYPE: | |
7165 | return native_interpret_real (type, ptr, len); | |
7166 | ||
7167 | case COMPLEX_TYPE: | |
7168 | return native_interpret_complex (type, ptr, len); | |
7169 | ||
7170 | case VECTOR_TYPE: | |
7171 | return native_interpret_vector (type, ptr, len); | |
7172 | ||
7173 | default: | |
7174 | return NULL_TREE; | |
7175 | } | |
7176 | } | |
7177 | ||
7178 | ||
7179 | /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type | |
7180 | TYPE at compile-time. If we're unable to perform the conversion | |
7181 | return NULL_TREE. */ | |
7182 | ||
7183 | static tree | |
7184 | fold_view_convert_expr (tree type, tree expr) | |
7185 | { | |
7186 | /* We support up to 512-bit values (for V8DFmode). */ | |
7187 | unsigned char buffer[64]; | |
7188 | int len; | |
7189 | ||
7190 | /* Check that the host and target are sane. */ | |
7191 | if (CHAR_BIT != 8 || BITS_PER_UNIT != 8) | |
7192 | return NULL_TREE; | |
7193 | ||
7194 | len = native_encode_expr (expr, buffer, sizeof (buffer)); | |
7195 | if (len == 0) | |
7196 | return NULL_TREE; | |
7197 | ||
7198 | return native_interpret_expr (type, buffer, len); | |
7199 | } | |
7200 | ||
7201 | ||
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  /* Only single-operand expression codes belong here.  */
  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (code == NOP_EXPR || code == CONVERT_EXPR
	  || code == FLOAT_EXPR || code == ABS_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type
	     matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operands.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any cases, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      /* Distribute the unary operation into COMPOUND_EXPR and
	 COND_EXPR operands so the interesting subexpression gets
	 folded.  */
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold_build1 (code, type, arg01);
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold_build1 (code, type, arg02);
	  tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
			     arg01, arg02);

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((code == NOP_EXPR || code == CONVERT_EXPR
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    tem = build1 (code, type,
			  build3 (COND_EXPR,
				  TREE_TYPE (TREE_OPERAND
					     (TREE_OPERAND (tem, 1), 0)),
				  TREE_OPERAND (tem, 0),
				  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
	  return tem;
	}
      else if (COMPARISON_CLASS_P (arg0))
	{
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    {
	      arg0 = copy_node (arg0);
	      TREE_TYPE (arg0) = type;
	      return arg0;
	    }
	  else if (TREE_CODE (type) != INTEGER_TYPE)
	    return fold_build3 (COND_EXPR, type, arg0,
				fold_build1 (code, type,
					     integer_one_node),
				fold_build1 (code, type,
					     integer_zero_node));
	}
    }

  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* A conversion to the operand's own type is a no-op.  */
      if (TREE_TYPE (op0) == type)
	return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
	 new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
	return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
			    TREE_OPERAND (op0, 1));

      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (op0) == NOP_EXPR
	  || TREE_CODE (op0) == CONVERT_EXPR)
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inter_type = TREE_TYPE (op0);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && (((inter_int || inter_ptr) && final_int)
		  || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));

	  /* Likewise, if the intermediate and final types are either both
	     float or both integer, we don't need the middle conversion if
	     it is wider than the final type and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer
	     since then we sometimes need the inner conversion.  Likewise if
	     the outer has a precision not equal to the size of its mode.  */
	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
	       || (inter_float && inside_float)
	       || (inter_vec && inside_vec))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_vec
		  || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr
	      && (! final_vec || inter_prec == inside_prec))
	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  */
	  if (inside_int && inter_int && final_int
	      && inside_prec < inter_prec && inter_prec < final_prec
	      && inside_unsignedp && !inter_unsignedp)
	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - some conversion is a vector (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.
	     - the final type is a pointer type and the initial type not
	     - the initial type is a pointer to an array and the final type
	       not.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && ! inside_vec && ! inter_vec && ! final_vec
	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && final_ptr == inside_ptr
	      && ! (inside_ptr
		    && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
		    && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
	}

      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
	  tree base = TREE_OPERAND (op0, 0);
	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type.  */
	  if (! offset && bitpos == 0
	      && TYPE_MAIN_VARIANT (TREE_TYPE (type))
		 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	    return fold_convert (type, build_fold_addr_expr (base));
	}

      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constants (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (type) != BOOLEAN_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and = op0;
	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      /* The transformation is safe when the mask's bits above
		 the narrower precision are all clear.  */
	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
		  and0 = fold_convert (uns, and0);
		  and1 = fold_convert (uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
					TREE_INT_CST_HIGH (and1));
	      tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
				    TREE_CONSTANT_OVERFLOW (and1));
	      return fold_build2 (BIT_AND_EXPR, type,
				  fold_convert (type, and0), tem);
	    }
	}

      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
	 T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (type)
	  && BINARY_CLASS_P (arg0)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree t0 = type;
	  tree t1 = TREE_TYPE (arg00);
	  tree tt0 = TREE_TYPE (t0);
	  tree tt1 = TREE_TYPE (t1);
	  tree s0 = TYPE_SIZE (tt0);
	  tree s1 = TYPE_SIZE (tt1);

	  if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
	    return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
			   TREE_OPERAND (arg0, 1));
	}

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is a integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
	}

      /* Fall back to constant folding of the conversion itself.  */
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      /* Collapse nested VIEW_CONVERT_EXPRs; otherwise try folding the
	 reinterpretation of a constant at compile time.  */
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
      return fold_view_convert_expr (type, op0);

    case NEGATE_EXPR:
      tem = fold_negate_expr (arg0);
      if (tem)
	return fold_convert (type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert (type, fold_build1 (ABS_EXPR,
						    TREE_TYPE (targ0),
						    targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
	}
      return NULL_TREE;

    case CONJ_EXPR:
      /* Conjugation of a non-complex value is the identity.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
	  return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert (itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      /* conj (conj (x)) -> x.  */
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert (type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (arg0, 0);
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
			    build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary (BIT_NOT_EXPR, type,
				     fold_convert (type,
						   TREE_OPERAND (arg0, 0)))))
	return fold_build2 (BIT_XOR_EXPR, type, tem,
			    fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary (BIT_NOT_EXPR, type,
				     fold_convert (type,
						   TREE_OPERAND (arg0, 1)))))
	return fold_build2 (BIT_XOR_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)), tem);

      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert (type, tem);

    case REALPART_EXPR:
      /* The real part of a non-complex value is the value itself.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      /* Distribute over complex addition/subtraction.  */
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2 (TREE_CODE (arg0), itype,
			     fold_build1 (REALPART_EXPR, itype,
					  TREE_OPERAND (arg0, 0)),
			     fold_build1 (REALPART_EXPR, itype,
					  TREE_OPERAND (arg0, 1)));
	  return fold_convert (type, tem);
	}
      /* Conjugation does not change the real part.  */
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert (type, tem);
	}
      return NULL_TREE;

    case IMAGPART_EXPR:
      /* The imaginary part of a non-complex value is zero.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
				 TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      /* Distribute over complex addition/subtraction.  */
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2 (TREE_CODE (arg0), itype,
			     fold_build1 (IMAGPART_EXPR, itype,
					  TREE_OPERAND (arg0, 0)),
			     fold_build1 (IMAGPART_EXPR, itype,
					  TREE_OPERAND (arg0, 1)));
	  return fold_convert (type, tem);
	}
      /* Conjugation negates the imaginary part.  */
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert (type, negate_expr (tem));
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
7708 | ||
7e50ecae | 7709 | /* Fold a binary expression of code CODE and type TYPE with operands |
7710 | OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination. | |
7711 | Return the folded expression if folding is successful. Otherwise, | |
7712 | return NULL_TREE. */ | |
7713 | ||
7714 | static tree | |
7715 | fold_minmax (enum tree_code code, tree type, tree op0, tree op1) | |
7716 | { | |
7717 | enum tree_code compl_code; | |
7718 | ||
7719 | if (code == MIN_EXPR) | |
7720 | compl_code = MAX_EXPR; | |
7721 | else if (code == MAX_EXPR) | |
7722 | compl_code = MIN_EXPR; | |
7723 | else | |
d9560eb6 | 7724 | gcc_unreachable (); |
7e50ecae | 7725 | |
7726 |