]>
Commit | Line | Data |
---|---|---|
749a2da1 | 1 | /* RTL simplification functions for GNU compiler. |
af841dbd | 2 | Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998, |
66647d44 | 3 | 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 |
eeef0e45 | 4 | Free Software Foundation, Inc. |
0cedb36c | 5 | |
1322177d | 6 | This file is part of GCC. |
0cedb36c | 7 | |
1322177d LB |
8 | GCC is free software; you can redistribute it and/or modify it under |
9 | the terms of the GNU General Public License as published by the Free | |
9dcd6f09 | 10 | Software Foundation; either version 3, or (at your option) any later |
1322177d | 11 | version. |
0cedb36c | 12 | |
1322177d LB |
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 | for more details. | |
0cedb36c JL |
17 | |
18 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
19 | along with GCC; see the file COPYING3. If not see |
20 | <http://www.gnu.org/licenses/>. */ | |
0cedb36c JL |
21 | |
22 | ||
23 | #include "config.h" | |
0cedb36c | 24 | #include "system.h" |
4977bab6 ZW |
25 | #include "coretypes.h" |
26 | #include "tm.h" | |
0cedb36c | 27 | #include "rtl.h" |
efdc7e19 | 28 | #include "tree.h" |
0cedb36c JL |
29 | #include "tm_p.h" |
30 | #include "regs.h" | |
31 | #include "hard-reg-set.h" | |
32 | #include "flags.h" | |
33 | #include "real.h" | |
34 | #include "insn-config.h" | |
35 | #include "recog.h" | |
36 | #include "function.h" | |
37 | #include "expr.h" | |
38 | #include "toplev.h" | |
39 | #include "output.h" | |
eab5c70a | 40 | #include "ggc.h" |
7daebb7a | 41 | #include "target.h" |
0cedb36c JL |
42 | |
43 | /* Simplification and canonicalization of RTL. */ | |
44 | ||
3839069b ZW |
/* Much code operates on (low, high) pairs; the low value is an
   unsigned wide int, the high value a signed wide int.  We
   occasionally need to sign extend from low to high as if low were a
   signed wide int.  */
/* The parameter is fully parenthesized so that compound arguments
   (e.g. "a + b") are cast as a whole, not just their first operand.  */
#define HWI_SIGN_EXTEND(low) \
 ((((HOST_WIDE_INT) (low)) < 0) ? ((HOST_WIDE_INT) -1) : ((HOST_WIDE_INT) 0))
0cedb36c | 51 | |
/* Forward declarations for the file-local helpers defined below.  */
static rtx neg_const_int (enum machine_mode, const_rtx);
static bool plus_minus_operand_p (const_rtx);
static bool simplify_plus_minus_op_data_cmp (rtx, rtx);
static rtx simplify_plus_minus (enum rtx_code, enum machine_mode, rtx, rtx);
static rtx simplify_immed_subreg (enum machine_mode, rtx, enum machine_mode,
				  unsigned int);
static rtx simplify_associative_operation (enum rtx_code, enum machine_mode,
					   rtx, rtx);
static rtx simplify_relational_operation_1 (enum rtx_code, enum machine_mode,
					    enum machine_mode, rtx, rtx);
static rtx simplify_unary_operation_1 (enum rtx_code, enum machine_mode, rtx);
static rtx simplify_binary_operation_1 (enum rtx_code, enum machine_mode,
					rtx, rtx, rtx, rtx);
aff8a8d5 CM |
65 | \f |
66 | /* Negate a CONST_INT rtx, truncating (because a conversion from a | |
23d1aac4 | 67 | maximally negative number can overflow). */ |
aff8a8d5 | 68 | static rtx |
f7d504c2 | 69 | neg_const_int (enum machine_mode mode, const_rtx i) |
aff8a8d5 | 70 | { |
2496c7bd | 71 | return gen_int_mode (- INTVAL (i), mode); |
aff8a8d5 CM |
72 | } |
73 | ||
/* Test whether expression, X, is an immediate constant that represents
   the most significant bit of machine mode MODE.  */

bool
mode_signbit_p (enum machine_mode mode, const_rtx x)
{
  unsigned HOST_WIDE_INT val;
  unsigned int width;

  /* Only integer modes have a sign bit in this sense.  */
  if (GET_MODE_CLASS (mode) != MODE_INT)
    return false;

  width = GET_MODE_BITSIZE (mode);
  if (width == 0)
    return false;

  /* Fetch the constant's value.  A single-word constant arrives as a
     CONST_INT.  A double-word constant arrives as a CONST_DOUBLE; its
     low word must be zero for the whole value to be the sign bit, so
     only the high word needs checking from here on.  */
  if (width <= HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (x))
    val = INTVAL (x);
  else if (width <= 2 * HOST_BITS_PER_WIDE_INT
	   && GET_CODE (x) == CONST_DOUBLE
	   && CONST_DOUBLE_LOW (x) == 0)
    {
      val = CONST_DOUBLE_HIGH (x);
      /* Continue the computation within the high word alone.  */
      width -= HOST_BITS_PER_WIDE_INT;
    }
  else
    /* Wider constants or non-constant rtxes never match.  */
    return false;

  /* Discard any bits above MODE's width (e.g. sign-extension copies in
     a CONST_INT), then compare against a lone 1 in the top bit.  */
  if (width < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
  return val == ((unsigned HOST_WIDE_INT) 1 << (width - 1));
}
749a2da1 | 107 | \f |
786de7eb | 108 | /* Make a binary operation by properly ordering the operands and |
0cedb36c JL |
109 | seeing if the expression folds. */ |
110 | ||
111 | rtx | |
46c5ad27 AJ |
112 | simplify_gen_binary (enum rtx_code code, enum machine_mode mode, rtx op0, |
113 | rtx op1) | |
0cedb36c JL |
114 | { |
115 | rtx tem; | |
116 | ||
0cedb36c JL |
117 | /* If this simplifies, do it. */ |
118 | tem = simplify_binary_operation (code, mode, op0, op1); | |
0cedb36c JL |
119 | if (tem) |
120 | return tem; | |
121 | ||
68162a97 ILT |
122 | /* Put complex operands first and constants second if commutative. */ |
123 | if (GET_RTX_CLASS (code) == RTX_COMM_ARITH | |
124 | && swap_commutative_operands_p (op0, op1)) | |
125 | tem = op0, op0 = op1, op1 = tem; | |
126 | ||
e16e3291 | 127 | return gen_rtx_fmt_ee (code, mode, op0, op1); |
0cedb36c JL |
128 | } |
129 | \f | |
/* If X is a MEM referencing the constant pool, return the real value.
   Otherwise return X.  */
rtx
avoid_constant_pool_reference (rtx x)
{
  rtx c, tmp, addr;
  enum machine_mode cmode;
  HOST_WIDE_INT offset = 0;

  switch (GET_CODE (x))
    {
    case MEM:
      /* Fall through to the constant-pool lookup below.  */
      break;

    case FLOAT_EXTEND:
      /* Handle float extensions of constant pool references.  */
      tmp = XEXP (x, 0);
      c = avoid_constant_pool_reference (tmp);
      if (c != tmp && GET_CODE (c) == CONST_DOUBLE)
	{
	  REAL_VALUE_TYPE d;

	  /* Re-widen the pool constant directly into the extended mode.  */
	  REAL_VALUE_FROM_CONST_DOUBLE (d, c);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
	}
      return x;

    default:
      /* Anything else cannot be a constant pool reference.  */
      return x;
    }

  /* A BLKmode access cannot usefully be replaced by a constant.  */
  if (GET_MODE (x) == BLKmode)
    return x;

  addr = XEXP (x, 0);

  /* Call target hook to avoid the effects of -fpic etc....  */
  addr = targetm.delegitimize_address (addr);

  /* Split the address into a base and integer offset.  */
  if (GET_CODE (addr) == CONST
      && GET_CODE (XEXP (addr, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (addr, 0), 1)))
    {
      offset = INTVAL (XEXP (XEXP (addr, 0), 1));
      addr = XEXP (XEXP (addr, 0), 0);
    }

  /* The symbol reference of interest is the second operand of a LO_SUM.  */
  if (GET_CODE (addr) == LO_SUM)
    addr = XEXP (addr, 1);

  /* If this is a constant pool reference, we can turn it into its
     constant and hope that simplifications happen.  */
  if (GET_CODE (addr) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (addr))
    {
      c = get_pool_constant (addr);
      cmode = get_pool_mode (addr);

      /* If we're accessing the constant in a different mode than it was
	 originally stored, attempt to fix that up via subreg simplifications.
	 If that fails we have no choice but to return the original memory.  */
      if (offset != 0 || cmode != GET_MODE (x))
	{
	  rtx tem = simplify_subreg (GET_MODE (x), c, cmode, offset);
	  if (tem && CONSTANT_P (tem))
	    return tem;
	}
      else
	return c;
    }

  return x;
}
204 | \f | |
/* Simplify a MEM based on its attributes.  This is the default
   delegitimize_address target hook, and it's recommended that every
   overrider call it.  */

rtx
delegitimize_mem_from_attrs (rtx x)
{
  /* Only MEMs with a known expression and a constant (or absent)
     offset attribute can be rewritten here.  */
  if (MEM_P (x)
      && MEM_EXPR (x)
      && (!MEM_OFFSET (x)
	  || GET_CODE (MEM_OFFSET (x)) == CONST_INT))
    {
      tree decl = MEM_EXPR (x);
      enum machine_mode mode = GET_MODE (x);
      HOST_WIDE_INT offset = 0;

      /* Reduce the MEM_EXPR to a VAR_DECL plus a byte offset, or give
	 up (DECL = NULL) when that is not possible.  */
      switch (TREE_CODE (decl))
	{
	default:
	  decl = NULL;
	  break;

	case VAR_DECL:
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case COMPONENT_REF:
	case BIT_FIELD_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  {
	    HOST_WIDE_INT bitsize, bitpos;
	    tree toffset;
	    int unsignedp = 0, volatilep = 0;

	    decl = get_inner_reference (decl, &bitsize, &bitpos, &toffset,
					&mode, &unsignedp, &volatilep, false);
	    /* Reject accesses that are not whole-mode, not byte-aligned,
	       or whose variable offset is not a host integer.  */
	    if (bitsize != GET_MODE_BITSIZE (mode)
		|| (bitpos % BITS_PER_UNIT)
		|| (toffset && !host_integerp (toffset, 0)))
	      decl = NULL;
	    else
	      {
		offset += bitpos / BITS_PER_UNIT;
		if (toffset)
		  offset += TREE_INT_CST_LOW (toffset);
	      }
	    break;
	  }
	}

      /* Only static or thread-local variables whose RTL is a MEM can
	 supply a replacement address.  */
      if (decl
	  && mode == GET_MODE (x)
	  && TREE_CODE (decl) == VAR_DECL
	  && (TREE_STATIC (decl)
	      || DECL_THREAD_LOCAL_P (decl))
	  && DECL_RTL_SET_P (decl)
	  && MEM_P (DECL_RTL (decl)))
	{
	  rtx newx;

	  if (MEM_OFFSET (x))
	    offset += INTVAL (MEM_OFFSET (x));

	  newx = DECL_RTL (decl);

	  if (MEM_P (newx))
	    {
	      rtx n = XEXP (newx, 0), o = XEXP (x, 0);

	      /* Avoid creating a new MEM needlessly if we already had
		 the same address.  We do if there's no OFFSET and the
		 old address X is identical to NEWX, or if X is of the
		 form (plus NEWX OFFSET), or the NEWX is of the form
		 (plus Y (const_int Z)) and X is that with the offset
		 added: (plus Y (const_int Z+OFFSET)).  NOTE: the inner
		 assignments to N and O strip the matched PLUS wrappers
		 before the final rtx_equal_p comparison.  */
	      if (!((offset == 0
		     || (GET_CODE (o) == PLUS
			 && GET_CODE (XEXP (o, 1)) == CONST_INT
			 && (offset == INTVAL (XEXP (o, 1))
			     || (GET_CODE (n) == PLUS
				 && GET_CODE (XEXP (n, 1)) == CONST_INT
				 && (INTVAL (XEXP (n, 1)) + offset
				     == INTVAL (XEXP (o, 1)))
				 && (n = XEXP (n, 0))))
			 && (o = XEXP (o, 0))))
		    && rtx_equal_p (o, n)))
		x = adjust_address_nv (newx, mode, offset);
	    }
	  else if (GET_MODE (x) == GET_MODE (newx)
		   && offset == 0)
	    x = newx;
	}
    }

  return x;
}
304 | \f | |
d9c695ff RK |
305 | /* Make a unary operation by first seeing if it folds and otherwise making |
306 | the specified operation. */ | |
307 | ||
308 | rtx | |
46c5ad27 AJ |
309 | simplify_gen_unary (enum rtx_code code, enum machine_mode mode, rtx op, |
310 | enum machine_mode op_mode) | |
d9c695ff RK |
311 | { |
312 | rtx tem; | |
313 | ||
314 | /* If this simplifies, use it. */ | |
315 | if ((tem = simplify_unary_operation (code, mode, op, op_mode)) != 0) | |
316 | return tem; | |
317 | ||
318 | return gen_rtx_fmt_e (code, mode, op); | |
319 | } | |
320 | ||
321 | /* Likewise for ternary operations. */ | |
322 | ||
323 | rtx | |
46c5ad27 AJ |
324 | simplify_gen_ternary (enum rtx_code code, enum machine_mode mode, |
325 | enum machine_mode op0_mode, rtx op0, rtx op1, rtx op2) | |
d9c695ff RK |
326 | { |
327 | rtx tem; | |
328 | ||
329 | /* If this simplifies, use it. */ | |
330 | if (0 != (tem = simplify_ternary_operation (code, mode, op0_mode, | |
331 | op0, op1, op2))) | |
332 | return tem; | |
333 | ||
334 | return gen_rtx_fmt_eee (code, mode, op0, op1, op2); | |
335 | } | |
c6fb08ad | 336 | |
141e454b | 337 | /* Likewise, for relational operations. |
c6fb08ad | 338 | CMP_MODE specifies mode comparison is done in. */ |
d9c695ff RK |
339 | |
340 | rtx | |
46c5ad27 AJ |
341 | simplify_gen_relational (enum rtx_code code, enum machine_mode mode, |
342 | enum machine_mode cmp_mode, rtx op0, rtx op1) | |
d9c695ff RK |
343 | { |
344 | rtx tem; | |
345 | ||
c6fb08ad PB |
346 | if (0 != (tem = simplify_relational_operation (code, mode, cmp_mode, |
347 | op0, op1))) | |
348 | return tem; | |
bc9c18c3 | 349 | |
d9c695ff RK |
350 | return gen_rtx_fmt_ee (code, mode, op0, op1); |
351 | } | |
352 | \f | |
/* Replace all occurrences of OLD_RTX in X with NEW_RTX and try to simplify the
   resulting RTX.  Return a new RTX which is as simplified as possible.  */

rtx
simplify_replace_rtx (rtx x, const_rtx old_rtx, rtx new_rtx)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode op_mode;
  rtx op0, op1, op2;

  /* If X is OLD_RTX, return NEW_RTX.  Otherwise, if this is an expression, try
     to build a new expression substituting recursively.  If we can't do
     anything, return our input.  */

  if (x == old_rtx)
    return new_rtx;

  /* Each case recurses into the operands, then returns X unchanged when
     no operand changed (preserving sharing), or rebuilds the expression
     via the matching simplify_gen_* routine when one did.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      op0 = XEXP (x, 0);
      /* Capture the operand's mode before substitution may change it.  */
      op_mode = GET_MODE (op0);
      op0 = simplify_replace_rtx (op0, old_rtx, new_rtx);
      if (op0 == XEXP (x, 0))
	return x;
      return simplify_gen_unary (code, mode, op0, op_mode);

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      op0 = simplify_replace_rtx (XEXP (x, 0), old_rtx, new_rtx);
      op1 = simplify_replace_rtx (XEXP (x, 1), old_rtx, new_rtx);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return x;
      return simplify_gen_binary (code, mode, op0, op1);

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      /* The comparison mode comes from whichever operand has one.  */
      op_mode = GET_MODE (op0) != VOIDmode ? GET_MODE (op0) : GET_MODE (op1);
      op0 = simplify_replace_rtx (op0, old_rtx, new_rtx);
      op1 = simplify_replace_rtx (op1, old_rtx, new_rtx);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return x;
      return simplify_gen_relational (code, mode, op_mode, op0, op1);

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      op0 = XEXP (x, 0);
      op_mode = GET_MODE (op0);
      op0 = simplify_replace_rtx (op0, old_rtx, new_rtx);
      op1 = simplify_replace_rtx (XEXP (x, 1), old_rtx, new_rtx);
      op2 = simplify_replace_rtx (XEXP (x, 2), old_rtx, new_rtx);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1) && op2 == XEXP (x, 2))
	return x;
      /* If the original operand had no mode, take it from the
	 substituted operand.  */
      if (op_mode == VOIDmode)
	op_mode = GET_MODE (op0);
      return simplify_gen_ternary (code, mode, op_mode, op0, op1, op2);

    case RTX_EXTRA:
      /* The only case we try to handle is a SUBREG.  */
      if (code == SUBREG)
	{
	  op0 = simplify_replace_rtx (SUBREG_REG (x), old_rtx, new_rtx);
	  if (op0 == SUBREG_REG (x))
	    return x;
	  op0 = simplify_gen_subreg (GET_MODE (x), op0,
				     GET_MODE (SUBREG_REG (x)),
				     SUBREG_BYTE (x));
	  /* simplify_gen_subreg may fail; keep the original then.  */
	  return op0 ? op0 : x;
	}
      break;

    case RTX_OBJ:
      if (code == MEM)
	{
	  op0 = simplify_replace_rtx (XEXP (x, 0), old_rtx, new_rtx);
	  if (op0 == XEXP (x, 0))
	    return x;
	  return replace_equiv_address_nv (x, op0);
	}
      else if (code == LO_SUM)
	{
	  op0 = simplify_replace_rtx (XEXP (x, 0), old_rtx, new_rtx);
	  op1 = simplify_replace_rtx (XEXP (x, 1), old_rtx, new_rtx);

	  /* (lo_sum (high x) x) -> x */
	  if (GET_CODE (op0) == HIGH && rtx_equal_p (XEXP (op0, 0), op1))
	    return op1;

	  if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	    return x;
	  return gen_rtx_LO_SUM (mode, op0, op1);
	}
      else if (code == REG)
	{
	  /* Registers are matched by value equality, not pointer
	     identity (the x == old_rtx test above).  */
	  if (rtx_equal_p (x, old_rtx))
	    return new_rtx;
	}
      break;

    default:
      break;
    }
  return x;
}
460 | \f | |
0cedb36c JL |
461 | /* Try to simplify a unary operation CODE whose output mode is to be |
462 | MODE with input operand OP whose mode was originally OP_MODE. | |
463 | Return zero if no simplification can be made. */ | |
0cedb36c | 464 | rtx |
46c5ad27 AJ |
465 | simplify_unary_operation (enum rtx_code code, enum machine_mode mode, |
466 | rtx op, enum machine_mode op_mode) | |
0a67e02c PB |
467 | { |
468 | rtx trueop, tem; | |
469 | ||
470 | if (GET_CODE (op) == CONST) | |
471 | op = XEXP (op, 0); | |
472 | ||
473 | trueop = avoid_constant_pool_reference (op); | |
474 | ||
475 | tem = simplify_const_unary_operation (code, mode, trueop, op_mode); | |
476 | if (tem) | |
477 | return tem; | |
478 | ||
479 | return simplify_unary_operation_1 (code, mode, op); | |
480 | } | |
481 | ||
482 | /* Perform some simplifications we can do even if the operands | |
483 | aren't constant. */ | |
484 | static rtx | |
485 | simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op) | |
486 | { | |
487 | enum rtx_code reversed; | |
488 | rtx temp; | |
489 | ||
490 | switch (code) | |
491 | { | |
492 | case NOT: | |
493 | /* (not (not X)) == X. */ | |
494 | if (GET_CODE (op) == NOT) | |
495 | return XEXP (op, 0); | |
496 | ||
bd1ef757 PB |
497 | /* (not (eq X Y)) == (ne X Y), etc. if BImode or the result of the |
498 | comparison is all ones. */ | |
0a67e02c PB |
499 | if (COMPARISON_P (op) |
500 | && (mode == BImode || STORE_FLAG_VALUE == -1) | |
501 | && ((reversed = reversed_comparison_code (op, NULL_RTX)) != UNKNOWN)) | |
502 | return simplify_gen_relational (reversed, mode, VOIDmode, | |
503 | XEXP (op, 0), XEXP (op, 1)); | |
504 | ||
505 | /* (not (plus X -1)) can become (neg X). */ | |
506 | if (GET_CODE (op) == PLUS | |
507 | && XEXP (op, 1) == constm1_rtx) | |
508 | return simplify_gen_unary (NEG, mode, XEXP (op, 0), mode); | |
509 | ||
510 | /* Similarly, (not (neg X)) is (plus X -1). */ | |
511 | if (GET_CODE (op) == NEG) | |
512 | return plus_constant (XEXP (op, 0), -1); | |
513 | ||
514 | /* (not (xor X C)) for C constant is (xor X D) with D = ~C. */ | |
515 | if (GET_CODE (op) == XOR | |
481683e1 | 516 | && CONST_INT_P (XEXP (op, 1)) |
0a67e02c PB |
517 | && (temp = simplify_unary_operation (NOT, mode, |
518 | XEXP (op, 1), mode)) != 0) | |
519 | return simplify_gen_binary (XOR, mode, XEXP (op, 0), temp); | |
520 | ||
521 | /* (not (plus X C)) for signbit C is (xor X D) with D = ~C. */ | |
522 | if (GET_CODE (op) == PLUS | |
481683e1 | 523 | && CONST_INT_P (XEXP (op, 1)) |
0a67e02c PB |
524 | && mode_signbit_p (mode, XEXP (op, 1)) |
525 | && (temp = simplify_unary_operation (NOT, mode, | |
526 | XEXP (op, 1), mode)) != 0) | |
527 | return simplify_gen_binary (XOR, mode, XEXP (op, 0), temp); | |
528 | ||
529 | ||
530 | /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for | |
531 | operands other than 1, but that is not valid. We could do a | |
532 | similar simplification for (not (lshiftrt C X)) where C is | |
533 | just the sign bit, but this doesn't seem common enough to | |
534 | bother with. */ | |
535 | if (GET_CODE (op) == ASHIFT | |
536 | && XEXP (op, 0) == const1_rtx) | |
537 | { | |
538 | temp = simplify_gen_unary (NOT, mode, const1_rtx, mode); | |
539 | return simplify_gen_binary (ROTATE, mode, temp, XEXP (op, 1)); | |
540 | } | |
541 | ||
0a67e02c PB |
542 | /* (not (ashiftrt foo C)) where C is the number of bits in FOO |
543 | minus 1 is (ge foo (const_int 0)) if STORE_FLAG_VALUE is -1, | |
544 | so we can perform the above simplification. */ | |
bd1ef757 | 545 | |
0a67e02c PB |
546 | if (STORE_FLAG_VALUE == -1 |
547 | && GET_CODE (op) == ASHIFTRT | |
481683e1 | 548 | && GET_CODE (XEXP (op, 1)) |
0a67e02c PB |
549 | && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1) |
550 | return simplify_gen_relational (GE, mode, VOIDmode, | |
551 | XEXP (op, 0), const0_rtx); | |
552 | ||
bd1ef757 PB |
553 | |
554 | if (GET_CODE (op) == SUBREG | |
555 | && subreg_lowpart_p (op) | |
556 | && (GET_MODE_SIZE (GET_MODE (op)) | |
557 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op)))) | |
558 | && GET_CODE (SUBREG_REG (op)) == ASHIFT | |
559 | && XEXP (SUBREG_REG (op), 0) == const1_rtx) | |
560 | { | |
561 | enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op)); | |
562 | rtx x; | |
563 | ||
564 | x = gen_rtx_ROTATE (inner_mode, | |
565 | simplify_gen_unary (NOT, inner_mode, const1_rtx, | |
566 | inner_mode), | |
567 | XEXP (SUBREG_REG (op), 1)); | |
568 | return rtl_hooks.gen_lowpart_no_emit (mode, x); | |
569 | } | |
570 | ||
571 | /* Apply De Morgan's laws to reduce number of patterns for machines | |
572 | with negating logical insns (and-not, nand, etc.). If result has | |
573 | only one NOT, put it first, since that is how the patterns are | |
574 | coded. */ | |
575 | ||
576 | if (GET_CODE (op) == IOR || GET_CODE (op) == AND) | |
577 | { | |
578 | rtx in1 = XEXP (op, 0), in2 = XEXP (op, 1); | |
579 | enum machine_mode op_mode; | |
580 | ||
581 | op_mode = GET_MODE (in1); | |
582 | in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode); | |
583 | ||
584 | op_mode = GET_MODE (in2); | |
585 | if (op_mode == VOIDmode) | |
586 | op_mode = mode; | |
587 | in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode); | |
588 | ||
589 | if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT) | |
590 | { | |
591 | rtx tem = in2; | |
592 | in2 = in1; in1 = tem; | |
593 | } | |
594 | ||
595 | return gen_rtx_fmt_ee (GET_CODE (op) == IOR ? AND : IOR, | |
596 | mode, in1, in2); | |
597 | } | |
0a67e02c PB |
598 | break; |
599 | ||
600 | case NEG: | |
601 | /* (neg (neg X)) == X. */ | |
602 | if (GET_CODE (op) == NEG) | |
603 | return XEXP (op, 0); | |
604 | ||
605 | /* (neg (plus X 1)) can become (not X). */ | |
606 | if (GET_CODE (op) == PLUS | |
607 | && XEXP (op, 1) == const1_rtx) | |
608 | return simplify_gen_unary (NOT, mode, XEXP (op, 0), mode); | |
609 | ||
610 | /* Similarly, (neg (not X)) is (plus X 1). */ | |
611 | if (GET_CODE (op) == NOT) | |
612 | return plus_constant (XEXP (op, 0), 1); | |
613 | ||
614 | /* (neg (minus X Y)) can become (minus Y X). This transformation | |
615 | isn't safe for modes with signed zeros, since if X and Y are | |
616 | both +0, (minus Y X) is the same as (minus X Y). If the | |
617 | rounding mode is towards +infinity (or -infinity) then the two | |
618 | expressions will be rounded differently. */ | |
619 | if (GET_CODE (op) == MINUS | |
620 | && !HONOR_SIGNED_ZEROS (mode) | |
621 | && !HONOR_SIGN_DEPENDENT_ROUNDING (mode)) | |
622 | return simplify_gen_binary (MINUS, mode, XEXP (op, 1), XEXP (op, 0)); | |
623 | ||
624 | if (GET_CODE (op) == PLUS | |
625 | && !HONOR_SIGNED_ZEROS (mode) | |
626 | && !HONOR_SIGN_DEPENDENT_ROUNDING (mode)) | |
627 | { | |
628 | /* (neg (plus A C)) is simplified to (minus -C A). */ | |
481683e1 | 629 | if (CONST_INT_P (XEXP (op, 1)) |
0a67e02c PB |
630 | || GET_CODE (XEXP (op, 1)) == CONST_DOUBLE) |
631 | { | |
632 | temp = simplify_unary_operation (NEG, mode, XEXP (op, 1), mode); | |
633 | if (temp) | |
634 | return simplify_gen_binary (MINUS, mode, temp, XEXP (op, 0)); | |
635 | } | |
636 | ||
637 | /* (neg (plus A B)) is canonicalized to (minus (neg A) B). */ | |
638 | temp = simplify_gen_unary (NEG, mode, XEXP (op, 0), mode); | |
639 | return simplify_gen_binary (MINUS, mode, temp, XEXP (op, 1)); | |
640 | } | |
641 | ||
642 | /* (neg (mult A B)) becomes (mult (neg A) B). | |
643 | This works even for floating-point values. */ | |
644 | if (GET_CODE (op) == MULT | |
645 | && !HONOR_SIGN_DEPENDENT_ROUNDING (mode)) | |
646 | { | |
647 | temp = simplify_gen_unary (NEG, mode, XEXP (op, 0), mode); | |
648 | return simplify_gen_binary (MULT, mode, temp, XEXP (op, 1)); | |
649 | } | |
650 | ||
651 | /* NEG commutes with ASHIFT since it is multiplication. Only do | |
652 | this if we can then eliminate the NEG (e.g., if the operand | |
653 | is a constant). */ | |
654 | if (GET_CODE (op) == ASHIFT) | |
655 | { | |
656 | temp = simplify_unary_operation (NEG, mode, XEXP (op, 0), mode); | |
657 | if (temp) | |
658 | return simplify_gen_binary (ASHIFT, mode, temp, XEXP (op, 1)); | |
659 | } | |
660 | ||
661 | /* (neg (ashiftrt X C)) can be replaced by (lshiftrt X C) when | |
662 | C is equal to the width of MODE minus 1. */ | |
663 | if (GET_CODE (op) == ASHIFTRT | |
481683e1 | 664 | && CONST_INT_P (XEXP (op, 1)) |
0a67e02c PB |
665 | && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1) |
666 | return simplify_gen_binary (LSHIFTRT, mode, | |
667 | XEXP (op, 0), XEXP (op, 1)); | |
668 | ||
669 | /* (neg (lshiftrt X C)) can be replaced by (ashiftrt X C) when | |
670 | C is equal to the width of MODE minus 1. */ | |
671 | if (GET_CODE (op) == LSHIFTRT | |
481683e1 | 672 | && CONST_INT_P (XEXP (op, 1)) |
0a67e02c PB |
673 | && INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1) |
674 | return simplify_gen_binary (ASHIFTRT, mode, | |
675 | XEXP (op, 0), XEXP (op, 1)); | |
676 | ||
bd1ef757 PB |
677 | /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */ |
678 | if (GET_CODE (op) == XOR | |
679 | && XEXP (op, 1) == const1_rtx | |
680 | && nonzero_bits (XEXP (op, 0), mode) == 1) | |
681 | return plus_constant (XEXP (op, 0), -1); | |
8305d786 RS |
682 | |
683 | /* (neg (lt x 0)) is (ashiftrt X C) if STORE_FLAG_VALUE is 1. */ | |
684 | /* (neg (lt x 0)) is (lshiftrt X C) if STORE_FLAG_VALUE is -1. */ | |
685 | if (GET_CODE (op) == LT | |
71cca289 JJ |
686 | && XEXP (op, 1) == const0_rtx |
687 | && SCALAR_INT_MODE_P (GET_MODE (XEXP (op, 0)))) | |
8305d786 | 688 | { |
0f2f71b5 RS |
689 | enum machine_mode inner = GET_MODE (XEXP (op, 0)); |
690 | int isize = GET_MODE_BITSIZE (inner); | |
8305d786 | 691 | if (STORE_FLAG_VALUE == 1) |
0f2f71b5 RS |
692 | { |
693 | temp = simplify_gen_binary (ASHIFTRT, inner, XEXP (op, 0), | |
694 | GEN_INT (isize - 1)); | |
695 | if (mode == inner) | |
696 | return temp; | |
697 | if (GET_MODE_BITSIZE (mode) > isize) | |
698 | return simplify_gen_unary (SIGN_EXTEND, mode, temp, inner); | |
699 | return simplify_gen_unary (TRUNCATE, mode, temp, inner); | |
700 | } | |
8305d786 | 701 | else if (STORE_FLAG_VALUE == -1) |
0f2f71b5 RS |
702 | { |
703 | temp = simplify_gen_binary (LSHIFTRT, inner, XEXP (op, 0), | |
704 | GEN_INT (isize - 1)); | |
705 | if (mode == inner) | |
706 | return temp; | |
707 | if (GET_MODE_BITSIZE (mode) > isize) | |
708 | return simplify_gen_unary (ZERO_EXTEND, mode, temp, inner); | |
709 | return simplify_gen_unary (TRUNCATE, mode, temp, inner); | |
710 | } | |
8305d786 | 711 | } |
bd1ef757 PB |
712 | break; |
713 | ||
714 | case TRUNCATE: | |
715 | /* We can't handle truncation to a partial integer mode here | |
716 | because we don't know the real bitsize of the partial | |
717 | integer mode. */ | |
718 | if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT) | |
719 | break; | |
720 | ||
721 | /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */ | |
722 | if ((GET_CODE (op) == SIGN_EXTEND | |
723 | || GET_CODE (op) == ZERO_EXTEND) | |
724 | && GET_MODE (XEXP (op, 0)) == mode) | |
725 | return XEXP (op, 0); | |
726 | ||
727 | /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is | |
728 | (OP:SI foo:SI) if OP is NEG or ABS. */ | |
729 | if ((GET_CODE (op) == ABS | |
730 | || GET_CODE (op) == NEG) | |
731 | && (GET_CODE (XEXP (op, 0)) == SIGN_EXTEND | |
732 | || GET_CODE (XEXP (op, 0)) == ZERO_EXTEND) | |
733 | && GET_MODE (XEXP (XEXP (op, 0), 0)) == mode) | |
734 | return simplify_gen_unary (GET_CODE (op), mode, | |
735 | XEXP (XEXP (op, 0), 0), mode); | |
736 | ||
e963b866 AN |
737 | /* (truncate:A (subreg:B (truncate:C X) 0)) is |
738 | (truncate:A X). */ | |
bd1ef757 PB |
739 | if (GET_CODE (op) == SUBREG |
740 | && GET_CODE (SUBREG_REG (op)) == TRUNCATE | |
741 | && subreg_lowpart_p (op)) | |
e963b866 AN |
742 | return simplify_gen_unary (TRUNCATE, mode, XEXP (SUBREG_REG (op), 0), |
743 | GET_MODE (XEXP (SUBREG_REG (op), 0))); | |
bd1ef757 PB |
744 | |
745 | /* If we know that the value is already truncated, we can | |
d3b72690 PB |
746 | replace the TRUNCATE with a SUBREG. Note that this is also |
747 | valid if TRULY_NOOP_TRUNCATION is false for the corresponding | |
748 | modes we just have to apply a different definition for | |
749 | truncation. But don't do this for an (LSHIFTRT (MULT ...)) | |
750 | since this will cause problems with the umulXi3_highpart | |
751 | patterns. */ | |
752 | if ((TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode), | |
bd1ef757 | 753 | GET_MODE_BITSIZE (GET_MODE (op))) |
d3b72690 | 754 | ? (num_sign_bit_copies (op, GET_MODE (op)) |
2332d585 AN |
755 | > (unsigned int) (GET_MODE_BITSIZE (GET_MODE (op)) |
756 | - GET_MODE_BITSIZE (mode))) | |
d3b72690 | 757 | : truncated_to_mode (mode, op)) |
bd1ef757 PB |
758 | && ! (GET_CODE (op) == LSHIFTRT |
759 | && GET_CODE (XEXP (op, 0)) == MULT)) | |
760 | return rtl_hooks.gen_lowpart_no_emit (mode, op); | |
761 | ||
762 | /* A truncate of a comparison can be replaced with a subreg if | |
763 | STORE_FLAG_VALUE permits. This is like the previous test, | |
764 | but it works even if the comparison is done in a mode larger | |
765 | than HOST_BITS_PER_WIDE_INT. */ | |
766 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
767 | && COMPARISON_P (op) | |
768 | && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0) | |
769 | return rtl_hooks.gen_lowpart_no_emit (mode, op); | |
770 | break; | |
771 | ||
772 | case FLOAT_TRUNCATE: | |
15ed7b52 JG |
773 | if (DECIMAL_FLOAT_MODE_P (mode)) |
774 | break; | |
775 | ||
bd1ef757 PB |
776 | /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */ |
777 | if (GET_CODE (op) == FLOAT_EXTEND | |
778 | && GET_MODE (XEXP (op, 0)) == mode) | |
779 | return XEXP (op, 0); | |
780 | ||
781 | /* (float_truncate:SF (float_truncate:DF foo:XF)) | |
782 | = (float_truncate:SF foo:XF). | |
783 | This may eliminate double rounding, so it is unsafe. | |
784 | ||
785 | (float_truncate:SF (float_extend:XF foo:DF)) | |
786 | = (float_truncate:SF foo:DF). | |
787 | ||
788 | (float_truncate:DF (float_extend:XF foo:SF)) | |
789 | = (float_extend:SF foo:DF). */ | |
790 | if ((GET_CODE (op) == FLOAT_TRUNCATE | |
791 | && flag_unsafe_math_optimizations) | |
792 | || GET_CODE (op) == FLOAT_EXTEND) | |
793 | return simplify_gen_unary (GET_MODE_SIZE (GET_MODE (XEXP (op, | |
794 | 0))) | |
795 | > GET_MODE_SIZE (mode) | |
796 | ? FLOAT_TRUNCATE : FLOAT_EXTEND, | |
797 | mode, | |
798 | XEXP (op, 0), mode); | |
799 | ||
800 | /* (float_truncate (float x)) is (float x) */ | |
801 | if (GET_CODE (op) == FLOAT | |
802 | && (flag_unsafe_math_optimizations | |
a0c64295 UB |
803 | || (SCALAR_FLOAT_MODE_P (GET_MODE (op)) |
804 | && ((unsigned)significand_size (GET_MODE (op)) | |
805 | >= (GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0))) | |
806 | - num_sign_bit_copies (XEXP (op, 0), | |
807 | GET_MODE (XEXP (op, 0)))))))) | |
bd1ef757 PB |
808 | return simplify_gen_unary (FLOAT, mode, |
809 | XEXP (op, 0), | |
810 | GET_MODE (XEXP (op, 0))); | |
811 | ||
812 | /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is | |
813 | (OP:SF foo:SF) if OP is NEG or ABS. */ | |
814 | if ((GET_CODE (op) == ABS | |
815 | || GET_CODE (op) == NEG) | |
816 | && GET_CODE (XEXP (op, 0)) == FLOAT_EXTEND | |
817 | && GET_MODE (XEXP (XEXP (op, 0), 0)) == mode) | |
818 | return simplify_gen_unary (GET_CODE (op), mode, | |
819 | XEXP (XEXP (op, 0), 0), mode); | |
820 | ||
821 | /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0)) | |
822 | is (float_truncate:SF x). */ | |
823 | if (GET_CODE (op) == SUBREG | |
824 | && subreg_lowpart_p (op) | |
825 | && GET_CODE (SUBREG_REG (op)) == FLOAT_TRUNCATE) | |
826 | return SUBREG_REG (op); | |
827 | break; | |
828 | ||
829 | case FLOAT_EXTEND: | |
15ed7b52 JG |
830 | if (DECIMAL_FLOAT_MODE_P (mode)) |
831 | break; | |
832 | ||
bd1ef757 PB |
833 | /* (float_extend (float_extend x)) is (float_extend x) |
834 | ||
835 | (float_extend (float x)) is (float x) assuming that double | |
836 | rounding can't happen. | |
837 | */ | |
838 | if (GET_CODE (op) == FLOAT_EXTEND | |
839 | || (GET_CODE (op) == FLOAT | |
a0c64295 | 840 | && SCALAR_FLOAT_MODE_P (GET_MODE (op)) |
bd1ef757 PB |
841 | && ((unsigned)significand_size (GET_MODE (op)) |
842 | >= (GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0))) | |
843 | - num_sign_bit_copies (XEXP (op, 0), | |
844 | GET_MODE (XEXP (op, 0))))))) | |
845 | return simplify_gen_unary (GET_CODE (op), mode, | |
846 | XEXP (op, 0), | |
847 | GET_MODE (XEXP (op, 0))); | |
848 | ||
849 | break; | |
850 | ||
851 | case ABS: | |
852 | /* (abs (neg <foo>)) -> (abs <foo>) */ | |
853 | if (GET_CODE (op) == NEG) | |
854 | return simplify_gen_unary (ABS, mode, XEXP (op, 0), | |
855 | GET_MODE (XEXP (op, 0))); | |
856 | ||
857 | /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS), | |
858 | do nothing. */ | |
859 | if (GET_MODE (op) == VOIDmode) | |
860 | break; | |
861 | ||
862 | /* If operand is something known to be positive, ignore the ABS. */ | |
863 | if (GET_CODE (op) == FFS || GET_CODE (op) == ABS | |
864 | || ((GET_MODE_BITSIZE (GET_MODE (op)) | |
865 | <= HOST_BITS_PER_WIDE_INT) | |
866 | && ((nonzero_bits (op, GET_MODE (op)) | |
867 | & ((HOST_WIDE_INT) 1 | |
868 | << (GET_MODE_BITSIZE (GET_MODE (op)) - 1))) | |
869 | == 0))) | |
870 | return op; | |
871 | ||
872 | /* If operand is known to be only -1 or 0, convert ABS to NEG. */ | |
873 | if (num_sign_bit_copies (op, mode) == GET_MODE_BITSIZE (mode)) | |
874 | return gen_rtx_NEG (mode, op); | |
875 | ||
876 | break; | |
877 | ||
878 | case FFS: | |
879 | /* (ffs (*_extend <X>)) = (ffs <X>) */ | |
880 | if (GET_CODE (op) == SIGN_EXTEND | |
881 | || GET_CODE (op) == ZERO_EXTEND) | |
882 | return simplify_gen_unary (FFS, mode, XEXP (op, 0), | |
883 | GET_MODE (XEXP (op, 0))); | |
884 | break; | |
885 | ||
886 | case POPCOUNT: | |
9f05adb0 RS |
887 | switch (GET_CODE (op)) |
888 | { | |
889 | case BSWAP: | |
890 | case ZERO_EXTEND: | |
891 | /* (popcount (zero_extend <X>)) = (popcount <X>) */ | |
892 | return simplify_gen_unary (POPCOUNT, mode, XEXP (op, 0), | |
893 | GET_MODE (XEXP (op, 0))); | |
894 | ||
895 | case ROTATE: | |
896 | case ROTATERT: | |
897 | /* Rotations don't affect popcount. */ | |
898 | if (!side_effects_p (XEXP (op, 1))) | |
899 | return simplify_gen_unary (POPCOUNT, mode, XEXP (op, 0), | |
900 | GET_MODE (XEXP (op, 0))); | |
901 | break; | |
902 | ||
903 | default: | |
904 | break; | |
905 | } | |
906 | break; | |
907 | ||
bd1ef757 | 908 | case PARITY: |
9f05adb0 RS |
909 | switch (GET_CODE (op)) |
910 | { | |
911 | case NOT: | |
912 | case BSWAP: | |
913 | case ZERO_EXTEND: | |
914 | case SIGN_EXTEND: | |
915 | return simplify_gen_unary (PARITY, mode, XEXP (op, 0), | |
916 | GET_MODE (XEXP (op, 0))); | |
917 | ||
918 | case ROTATE: | |
919 | case ROTATERT: | |
920 | /* Rotations don't affect parity. */ | |
921 | if (!side_effects_p (XEXP (op, 1))) | |
922 | return simplify_gen_unary (PARITY, mode, XEXP (op, 0), | |
923 | GET_MODE (XEXP (op, 0))); | |
924 | break; | |
925 | ||
926 | default: | |
927 | break; | |
928 | } | |
929 | break; | |
930 | ||
931 | case BSWAP: | |
932 | /* (bswap (bswap x)) -> x. */ | |
933 | if (GET_CODE (op) == BSWAP) | |
934 | return XEXP (op, 0); | |
bd1ef757 PB |
935 | break; |
936 | ||
937 | case FLOAT: | |
938 | /* (float (sign_extend <X>)) = (float <X>). */ | |
939 | if (GET_CODE (op) == SIGN_EXTEND) | |
940 | return simplify_gen_unary (FLOAT, mode, XEXP (op, 0), | |
941 | GET_MODE (XEXP (op, 0))); | |
0a67e02c PB |
942 | break; |
943 | ||
944 | case SIGN_EXTEND: | |
945 | /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2)))) | |
946 | becomes just the MINUS if its mode is MODE. This allows | |
947 | folding switch statements on machines using casesi (such as | |
948 | the VAX). */ | |
949 | if (GET_CODE (op) == TRUNCATE | |
950 | && GET_MODE (XEXP (op, 0)) == mode | |
951 | && GET_CODE (XEXP (op, 0)) == MINUS | |
952 | && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF | |
953 | && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF) | |
954 | return XEXP (op, 0); | |
955 | ||
956 | /* Check for a sign extension of a subreg of a promoted | |
957 | variable, where the promotion is sign-extended, and the | |
958 | target mode is the same as the variable's promotion. */ | |
959 | if (GET_CODE (op) == SUBREG | |
960 | && SUBREG_PROMOTED_VAR_P (op) | |
961 | && ! SUBREG_PROMOTED_UNSIGNED_P (op) | |
4613543f RS |
962 | && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (XEXP (op, 0)))) |
963 | return rtl_hooks.gen_lowpart_no_emit (mode, op); | |
0a67e02c PB |
964 | |
965 | #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) | |
966 | if (! POINTERS_EXTEND_UNSIGNED | |
967 | && mode == Pmode && GET_MODE (op) == ptr_mode | |
968 | && (CONSTANT_P (op) | |
969 | || (GET_CODE (op) == SUBREG | |
970 | && REG_P (SUBREG_REG (op)) | |
971 | && REG_POINTER (SUBREG_REG (op)) | |
972 | && GET_MODE (SUBREG_REG (op)) == Pmode))) | |
973 | return convert_memory_address (Pmode, op); | |
974 | #endif | |
975 | break; | |
976 | ||
977 | case ZERO_EXTEND: | |
978 | /* Check for a zero extension of a subreg of a promoted | |
979 | variable, where the promotion is zero-extended, and the | |
980 | target mode is the same as the variable's promotion. */ | |
981 | if (GET_CODE (op) == SUBREG | |
982 | && SUBREG_PROMOTED_VAR_P (op) | |
7443a71d | 983 | && SUBREG_PROMOTED_UNSIGNED_P (op) > 0 |
4613543f RS |
984 | && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (XEXP (op, 0)))) |
985 | return rtl_hooks.gen_lowpart_no_emit (mode, op); | |
0a67e02c PB |
986 | |
987 | #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) | |
988 | if (POINTERS_EXTEND_UNSIGNED > 0 | |
989 | && mode == Pmode && GET_MODE (op) == ptr_mode | |
990 | && (CONSTANT_P (op) | |
991 | || (GET_CODE (op) == SUBREG | |
992 | && REG_P (SUBREG_REG (op)) | |
993 | && REG_POINTER (SUBREG_REG (op)) | |
994 | && GET_MODE (SUBREG_REG (op)) == Pmode))) | |
995 | return convert_memory_address (Pmode, op); | |
996 | #endif | |
997 | break; | |
998 | ||
999 | default: | |
1000 | break; | |
1001 | } | |
1002 | ||
1003 | return 0; | |
1004 | } | |
1005 | ||
/* Try to compute the value of a unary operation CODE whose output mode is to
   be MODE with input operand OP whose mode was originally OP_MODE.
   Return zero if the value cannot be computed.

   OP is expected to be a constant (CONST_INT, CONST_DOUBLE or CONST_VECTOR);
   the non-constant simplifications live in simplify_unary_operation_1.
   The result, when nonzero, is a constant rtx of mode MODE.  */
rtx
simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
				rtx op, enum machine_mode op_mode)
{
  unsigned int width = GET_MODE_BITSIZE (mode);

  /* (vec_duplicate C) with a constant element, or a constant vector being
     widened by repetition: build the CONST_VECTOR directly.  */
  if (code == VEC_DUPLICATE)
    {
      gcc_assert (VECTOR_MODE_P (mode));
      if (GET_MODE (op) != VOIDmode)
	{
	  if (!VECTOR_MODE_P (GET_MODE (op)))
	    gcc_assert (GET_MODE_INNER (mode) == GET_MODE (op));
	  else
	    gcc_assert (GET_MODE_INNER (mode) == GET_MODE_INNER
						(GET_MODE (op)));
	}
      if (CONST_INT_P (op) || GET_CODE (op) == CONST_DOUBLE
	  || GET_CODE (op) == CONST_VECTOR)
	{
          int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
          unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
	  rtvec v = rtvec_alloc (n_elts);
	  unsigned int i;

	  if (GET_CODE (op) != CONST_VECTOR)
	    /* Scalar constant: replicate it into every element.  */
	    for (i = 0; i < n_elts; i++)
	      RTVEC_ELT (v, i) = op;
	  else
	    {
	      /* Vector constant: tile the input elements cyclically.  The
		 input element count must evenly divide the output's.  */
	      enum machine_mode inmode = GET_MODE (op);
              int in_elt_size = GET_MODE_SIZE (GET_MODE_INNER (inmode));
              unsigned in_n_elts = (GET_MODE_SIZE (inmode) / in_elt_size);

	      gcc_assert (in_n_elts < n_elts);
	      gcc_assert ((n_elts % in_n_elts) == 0);
	      for (i = 0; i < n_elts; i++)
		RTVEC_ELT (v, i) = CONST_VECTOR_ELT (op, i % in_n_elts);
	    }
	  return gen_rtx_CONST_VECTOR (mode, v);
	}
    }

  /* Element-wise folding: apply the unary operation to each element of a
     CONST_VECTOR; give up if any element fails to fold.  */
  if (VECTOR_MODE_P (mode) && GET_CODE (op) == CONST_VECTOR)
    {
      int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
      unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
      enum machine_mode opmode = GET_MODE (op);
      int op_elt_size = GET_MODE_SIZE (GET_MODE_INNER (opmode));
      unsigned op_n_elts = (GET_MODE_SIZE (opmode) / op_elt_size);
      rtvec v = rtvec_alloc (n_elts);
      unsigned int i;

      gcc_assert (op_n_elts == n_elts);
      for (i = 0; i < n_elts; i++)
	{
	  rtx x = simplify_unary_operation (code, GET_MODE_INNER (mode),
					    CONST_VECTOR_ELT (op, i),
					    GET_MODE_INNER (opmode));
	  if (!x)
	    return 0;
	  RTVEC_ELT (v, i) = x;
	}
      return gen_rtx_CONST_VECTOR (mode, v);
    }

  /* The order of these tests is critical so that, for example, we don't
     check the wrong mode (input vs. output) for a conversion operation,
     such as FIX.  At some point, this should be simplified.  */

  /* (float C) of an integer constant: convert via REAL_VALUE_TYPE.  */
  if (code == FLOAT && GET_MODE (op) == VOIDmode
      && (GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op)))
    {
      HOST_WIDE_INT hv, lv;
      REAL_VALUE_TYPE d;

      if (CONST_INT_P (op))
	lv = INTVAL (op), hv = HWI_SIGN_EXTEND (lv);
      else
	lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);

      REAL_VALUE_FROM_INT (d, lv, hv, mode);
      d = real_value_truncate (mode, d);
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
  /* (unsigned_float C): like FLOAT, but interpret the bits as unsigned.  */
  else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
	   && (GET_CODE (op) == CONST_DOUBLE
	       || CONST_INT_P (op)))
    {
      HOST_WIDE_INT hv, lv;
      REAL_VALUE_TYPE d;

      if (CONST_INT_P (op))
	lv = INTVAL (op), hv = HWI_SIGN_EXTEND (lv);
      else
	lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);

      if (op_mode == VOIDmode)
	{
	  /* We don't know how to interpret negative-looking numbers in
	     this case, so don't try to fold those.  */
	  if (hv < 0)
	    return 0;
	}
      else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
	;
      else
	/* Narrow input mode: only the masked low word is meaningful.  */
	hv = 0, lv &= GET_MODE_MASK (op_mode);

      REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
      d = real_value_truncate (mode, d);
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }

  /* Single-word integer folding: the result fits in one HOST_WIDE_INT.  */
  if (CONST_INT_P (op)
      && width <= HOST_BITS_PER_WIDE_INT && width > 0)
    {
      HOST_WIDE_INT arg0 = INTVAL (op);
      HOST_WIDE_INT val;

      switch (code)
	{
	case NOT:
	  val = ~ arg0;
	  break;

	case NEG:
	  val = - arg0;
	  break;

	case ABS:
	  val = (arg0 >= 0 ? arg0 : - arg0);
	  break;

	case FFS:
	  /* Don't use ffs here.  Instead, get low order bit and then its
	     number.  If arg0 is zero, this will return 0, as desired.  */
	  arg0 &= GET_MODE_MASK (mode);
	  val = exact_log2 (arg0 & (- arg0)) + 1;
	  break;

	case CLZ:
	  arg0 &= GET_MODE_MASK (mode);
	  /* At zero, use the target-defined value if there is one;
	     otherwise fall through to the generic formula (which gives
	     the mode's bitsize for arg0 == 0 via floor_log2 (0) == -1).  */
	  if (arg0 == 0 && CLZ_DEFINED_VALUE_AT_ZERO (mode, val))
	    ;
	  else
	    val = GET_MODE_BITSIZE (mode) - floor_log2 (arg0) - 1;
	  break;

	case CTZ:
	  arg0 &= GET_MODE_MASK (mode);
	  if (arg0 == 0)
	    {
	      /* Even if the value at zero is undefined, we have to come
		 up with some replacement.  Seems good enough.  */
	      if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, val))
		val = GET_MODE_BITSIZE (mode);
	    }
	  else
	    val = exact_log2 (arg0 & -arg0);
	  break;

	case POPCOUNT:
	  /* Kernighan's trick: each iteration clears the lowest set bit.  */
	  arg0 &= GET_MODE_MASK (mode);
	  val = 0;
	  while (arg0)
	    val++, arg0 &= arg0 - 1;
	  break;

	case PARITY:
	  arg0 &= GET_MODE_MASK (mode);
	  val = 0;
	  while (arg0)
	    val++, arg0 &= arg0 - 1;
	  val &= 1;
	  break;

	case BSWAP:
	  {
	    /* Mirror the bytes: bit position s maps to width - s - 8.  */
	    unsigned int s;

	    val = 0;
	    for (s = 0; s < width; s += 8)
	      {
		unsigned int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;
		byte = (arg0 >> s) & 0xff;
		val |= byte << d;
	      }
	  }
	  break;

	case TRUNCATE:
	  /* gen_int_mode below re-canonicalizes VAL for the narrow mode.  */
	  val = arg0;
	  break;

	case ZERO_EXTEND:
	  /* When zero-extending a CONST_INT, we need to know its
	     original mode.  */
	  gcc_assert (op_mode != VOIDmode);
	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
	    {
	      /* If we were really extending the mode,
		 we would have to distinguish between zero-extension
		 and sign-extension.  */
	      gcc_assert (width == GET_MODE_BITSIZE (op_mode));
	      val = arg0;
	    }
	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
	    val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
	  else
	    return 0;
	  break;

	case SIGN_EXTEND:
	  if (op_mode == VOIDmode)
	    op_mode = mode;
	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
	    {
	      /* If we were really extending the mode,
		 we would have to distinguish between zero-extension
		 and sign-extension.  */
	      gcc_assert (width == GET_MODE_BITSIZE (op_mode));
	      val = arg0;
	    }
	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
	    {
	      /* Mask to the source width, then subtract 2^width if the
		 source sign bit is set, yielding the sign-extended value.  */
	      val
		= arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
	      if (val
		  & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
		val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
	    }
	  else
	    return 0;
	  break;

	case SQRT:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	case SS_TRUNCATE:
	case US_TRUNCATE:
	case SS_NEG:
	case US_NEG:
	  /* Not foldable on an integer constant.  */
	  return 0;

	default:
	  gcc_unreachable ();
	}

      return gen_int_mode (val, mode);
    }

  /* We can do some operations on integer CONST_DOUBLEs.  Also allow
     for a DImode operation on a CONST_INT.  The value is kept as a
     (low, high) word pair: L1/LV unsigned, H1/HV signed.  */
  else if (GET_MODE (op) == VOIDmode
	   && width <= HOST_BITS_PER_WIDE_INT * 2
	   && (GET_CODE (op) == CONST_DOUBLE
	       || CONST_INT_P (op)))
    {
      unsigned HOST_WIDE_INT l1, lv;
      HOST_WIDE_INT h1, hv;

      if (GET_CODE (op) == CONST_DOUBLE)
	l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
      else
	l1 = INTVAL (op), h1 = HWI_SIGN_EXTEND (l1);

      switch (code)
	{
	case NOT:
	  lv = ~ l1;
	  hv = ~ h1;
	  break;

	case NEG:
	  neg_double (l1, h1, &lv, &hv);
	  break;

	case ABS:
	  if (h1 < 0)
	    neg_double (l1, h1, &lv, &hv);
	  else
	    lv = l1, hv = h1;
	  break;

	case FFS:
	  hv = 0;
	  if (l1 == 0)
	    {
	      if (h1 == 0)
		lv = 0;
	      else
		lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & -h1) + 1;
	    }
	  else
	    lv = exact_log2 (l1 & -l1) + 1;
	  break;

	case CLZ:
	  hv = 0;
	  if (h1 != 0)
	    lv = GET_MODE_BITSIZE (mode) - floor_log2 (h1) - 1
	      - HOST_BITS_PER_WIDE_INT;
	  else if (l1 != 0)
	    lv = GET_MODE_BITSIZE (mode) - floor_log2 (l1) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (mode, lv))
	    lv = GET_MODE_BITSIZE (mode);
	  break;

	case CTZ:
	  hv = 0;
	  if (l1 != 0)
	    lv = exact_log2 (l1 & -l1);
	  else if (h1 != 0)
	    lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & -h1);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, lv))
	    lv = GET_MODE_BITSIZE (mode);
	  break;

	case POPCOUNT:
	  /* Count set bits in both words (Kernighan's trick).  */
	  hv = 0;
	  lv = 0;
	  while (l1)
	    lv++, l1 &= l1 - 1;
	  while (h1)
	    lv++, h1 &= h1 - 1;
	  break;

	case PARITY:
	  hv = 0;
	  lv = 0;
	  while (l1)
	    lv++, l1 &= l1 - 1;
	  while (h1)
	    lv++, h1 &= h1 - 1;
	  lv &= 1;
	  break;

	case BSWAP:
	  {
	    /* Byte-mirror across the full WIDTH, picking each source byte
	       from the word that holds it and depositing it likewise.  */
	    unsigned int s;

	    hv = 0;
	    lv = 0;
	    for (s = 0; s < width; s += 8)
	      {
		unsigned int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (l1 >> s) & 0xff;
		else
		  byte = (h1 >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		if (d < HOST_BITS_PER_WIDE_INT)
		  lv |= byte << d;
		else
		  hv |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }
	  break;

	case TRUNCATE:
	  /* This is just a change-of-mode, so do nothing.  */
	  lv = l1, hv = h1;
	  break;

	case ZERO_EXTEND:
	  gcc_assert (op_mode != VOIDmode);

	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
	    return 0;

	  hv = 0;
	  lv = l1 & GET_MODE_MASK (op_mode);
	  break;

	case SIGN_EXTEND:
	  if (op_mode == VOIDmode
	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
	    return 0;
	  else
	    {
	      lv = l1 & GET_MODE_MASK (op_mode);
	      if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
		  && (lv & ((HOST_WIDE_INT) 1
			    << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
		lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);

	      hv = HWI_SIGN_EXTEND (lv);
	    }
	  break;

	case SQRT:
	  return 0;

	default:
	  return 0;
	}

      return immed_double_const (lv, hv, mode);
    }

  /* Floating-point result from a floating-point constant operand.  */
  else if (GET_CODE (op) == CONST_DOUBLE
	   && SCALAR_FLOAT_MODE_P (mode))
    {
      REAL_VALUE_TYPE d, t;
      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      switch (code)
	{
	case SQRT:
	  /* Signaling NaNs must not be folded silently.  */
	  if (HONOR_SNANS (mode) && real_isnan (&d))
	    return 0;
	  real_sqrt (&t, mode, &d);
	  d = t;
	  break;
	case ABS:
	  d = REAL_VALUE_ABS (d);
	  break;
	case NEG:
	  d = REAL_VALUE_NEGATE (d);
	  break;
	case FLOAT_TRUNCATE:
	  d = real_value_truncate (mode, d);
	  break;
	case FLOAT_EXTEND:
	  /* All this does is change the mode.  */
	  break;
	case FIX:
	  real_arithmetic (&d, FIX_TRUNC_EXPR, &d, NULL);
	  break;
	case NOT:
	  {
	    /* Bitwise NOT of the target representation of the value.  */
	    long tmp[4];
	    int i;

	    real_to_target (tmp, &d, GET_MODE (op));
	    for (i = 0; i < 4; i++)
	      tmp[i] = ~tmp[i];
	    real_from_target (&d, tmp, mode);
	    break;
	  }
	default:
	  gcc_unreachable ();
	}
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }

  /* FIX/UNSIGNED_FIX of a floating-point constant to an integer mode,
     with saturation at the integer mode's bounds.  */
  else if (GET_CODE (op) == CONST_DOUBLE
	   && SCALAR_FLOAT_MODE_P (GET_MODE (op))
	   && GET_MODE_CLASS (mode) == MODE_INT
	   && width <= 2*HOST_BITS_PER_WIDE_INT && width > 0)
    {
      /* Although the overflow semantics of RTL's FIX and UNSIGNED_FIX
	 operators are intentionally left unspecified (to ease implementation
	 by target backends), for consistency, this routine implements the
	 same semantics for constant folding as used by the middle-end.  */

      /* This was formerly used only for non-IEEE float.
	 eggert@twinsun.com says it is safe for IEEE also.  */
      HOST_WIDE_INT xh, xl, th, tl;
      REAL_VALUE_TYPE x, t;
      REAL_VALUE_FROM_CONST_DOUBLE (x, op);
      switch (code)
	{
	case FIX:
	  if (REAL_VALUE_ISNAN (x))
	    return const0_rtx;

	  /* Test against the signed upper bound.  */
	  if (width > HOST_BITS_PER_WIDE_INT)
	    {
	      th = ((unsigned HOST_WIDE_INT) 1
		    << (width - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	      tl = -1;
	    }
	  else
	    {
	      th = 0;
	      tl = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
	    }
	  real_from_integer (&t, VOIDmode, tl, th, 0);
	  if (REAL_VALUES_LESS (t, x))
	    {
	      xh = th;
	      xl = tl;
	      break;
	    }

	  /* Test against the signed lower bound.  */
	  if (width > HOST_BITS_PER_WIDE_INT)
	    {
	      th = (HOST_WIDE_INT) -1 << (width - HOST_BITS_PER_WIDE_INT - 1);
	      tl = 0;
	    }
	  else
	    {
	      th = -1;
	      tl = (HOST_WIDE_INT) -1 << (width - 1);
	    }
	  real_from_integer (&t, VOIDmode, tl, th, 0);
	  if (REAL_VALUES_LESS (x, t))
	    {
	      xh = th;
	      xl = tl;
	      break;
	    }
	  REAL_VALUE_TO_INT (&xl, &xh, x);
	  break;

	case UNSIGNED_FIX:
	  if (REAL_VALUE_ISNAN (x) || REAL_VALUE_NEGATIVE (x))
	    return const0_rtx;

	  /* Test against the unsigned upper bound.  */
	  if (width == 2*HOST_BITS_PER_WIDE_INT)
	    {
	      th = -1;
	      tl = -1;
	    }
	  else if (width >= HOST_BITS_PER_WIDE_INT)
	    {
	      th = ((unsigned HOST_WIDE_INT) 1
		    << (width - HOST_BITS_PER_WIDE_INT)) - 1;
	      tl = -1;
	    }
	  else
	    {
	      th = 0;
	      tl = ((unsigned HOST_WIDE_INT) 1 << width) - 1;
	    }
	  real_from_integer (&t, VOIDmode, tl, th, 1);
	  if (REAL_VALUES_LESS (t, x))
	    {
	      xh = th;
	      xl = tl;
	      break;
	    }

	  REAL_VALUE_TO_INT (&xl, &xh, x);
	  break;

	default:
	  gcc_unreachable ();
	}
      return immed_double_const (xl, xh, mode);
    }

  return NULL_RTX;
}
1561 | \f | |
9ce79a7a RS |
1562 | /* Subroutine of simplify_binary_operation to simplify a commutative, |
1563 | associative binary operation CODE with result mode MODE, operating | |
1564 | on OP0 and OP1. CODE is currently one of PLUS, MULT, AND, IOR, XOR, | |
1565 | SMIN, SMAX, UMIN or UMAX. Return zero if no simplification or | |
1566 | canonicalization is possible. */ | |
dd61aa98 | 1567 | |
dd61aa98 RS |
1568 | static rtx |
1569 | simplify_associative_operation (enum rtx_code code, enum machine_mode mode, | |
1570 | rtx op0, rtx op1) | |
1571 | { | |
1572 | rtx tem; | |
1573 | ||
9ce79a7a RS |
1574 | /* Linearize the operator to the left. */ |
1575 | if (GET_CODE (op1) == code) | |
dd61aa98 | 1576 | { |
9ce79a7a RS |
1577 | /* "(a op b) op (c op d)" becomes "((a op b) op c) op d)". */ |
1578 | if (GET_CODE (op0) == code) | |
1579 | { | |
1580 | tem = simplify_gen_binary (code, mode, op0, XEXP (op1, 0)); | |
1581 | return simplify_gen_binary (code, mode, tem, XEXP (op1, 1)); | |
1582 | } | |
dd61aa98 | 1583 | |
9ce79a7a RS |
1584 | /* "a op (b op c)" becomes "(b op c) op a". */ |
1585 | if (! swap_commutative_operands_p (op1, op0)) | |
1586 | return simplify_gen_binary (code, mode, op1, op0); | |
dd61aa98 | 1587 | |
9ce79a7a RS |
1588 | tem = op0; |
1589 | op0 = op1; | |
1590 | op1 = tem; | |
dd61aa98 RS |
1591 | } |
1592 | ||
9ce79a7a | 1593 | if (GET_CODE (op0) == code) |
dd61aa98 | 1594 | { |
9ce79a7a RS |
1595 | /* Canonicalize "(x op c) op y" as "(x op y) op c". */ |
1596 | if (swap_commutative_operands_p (XEXP (op0, 1), op1)) | |
1597 | { | |
1598 | tem = simplify_gen_binary (code, mode, XEXP (op0, 0), op1); | |
1599 | return simplify_gen_binary (code, mode, tem, XEXP (op0, 1)); | |
1600 | } | |
1601 | ||
1602 | /* Attempt to simplify "(a op b) op c" as "a op (b op c)". */ | |
7e0b4eae | 1603 | tem = simplify_binary_operation (code, mode, XEXP (op0, 1), op1); |
9ce79a7a RS |
1604 | if (tem != 0) |
1605 | return simplify_gen_binary (code, mode, XEXP (op0, 0), tem); | |
1606 | ||
1607 | /* Attempt to simplify "(a op b) op c" as "(a op c) op b". */ | |
7e0b4eae | 1608 | tem = simplify_binary_operation (code, mode, XEXP (op0, 0), op1); |
9ce79a7a RS |
1609 | if (tem != 0) |
1610 | return simplify_gen_binary (code, mode, tem, XEXP (op0, 1)); | |
dd61aa98 RS |
1611 | } |
1612 | ||
1613 | return 0; | |
1614 | } | |
1615 | ||
0a67e02c | 1616 | |
0cedb36c JL |
1617 | /* Simplify a binary operation CODE with result mode MODE, operating on OP0 |
1618 | and OP1. Return 0 if no simplification is possible. | |
1619 | ||
1620 | Don't use this for relational operations such as EQ or LT. | |
1621 | Use simplify_relational_operation instead. */ | |
0cedb36c | 1622 | rtx |
46c5ad27 AJ |
1623 | simplify_binary_operation (enum rtx_code code, enum machine_mode mode, |
1624 | rtx op0, rtx op1) | |
0cedb36c | 1625 | { |
9ce79a7a | 1626 | rtx trueop0, trueop1; |
0cedb36c JL |
1627 | rtx tem; |
1628 | ||
1629 | /* Relational operations don't work here. We must know the mode | |
1630 | of the operands in order to do the comparison correctly. | |
1631 | Assuming a full word can give incorrect results. | |
1632 | Consider comparing 128 with -128 in QImode. */ | |
41374e13 NS |
1633 | gcc_assert (GET_RTX_CLASS (code) != RTX_COMPARE); |
1634 | gcc_assert (GET_RTX_CLASS (code) != RTX_COMM_COMPARE); | |
0cedb36c | 1635 | |
4ba5f925 | 1636 | /* Make sure the constant is second. */ |
ec8e098d | 1637 | if (GET_RTX_CLASS (code) == RTX_COMM_ARITH |
9ce79a7a | 1638 | && swap_commutative_operands_p (op0, op1)) |
4ba5f925 JH |
1639 | { |
1640 | tem = op0, op0 = op1, op1 = tem; | |
4ba5f925 JH |
1641 | } |
1642 | ||
9ce79a7a RS |
1643 | trueop0 = avoid_constant_pool_reference (op0); |
1644 | trueop1 = avoid_constant_pool_reference (op1); | |
1645 | ||
0a67e02c PB |
1646 | tem = simplify_const_binary_operation (code, mode, trueop0, trueop1); |
1647 | if (tem) | |
1648 | return tem; | |
1649 | return simplify_binary_operation_1 (code, mode, op0, op1, trueop0, trueop1); | |
1650 | } | |
1651 | ||
1753331b RS |
1652 | /* Subroutine of simplify_binary_operation. Simplify a binary operation |
1653 | CODE with result mode MODE, operating on OP0 and OP1. If OP0 and/or | |
1654 | OP1 are constant pool references, TRUEOP0 and TRUEOP1 represent the | |
1655 | actual constants. */ | |
1656 | ||
0a67e02c PB |
1657 | static rtx |
1658 | simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode, | |
1659 | rtx op0, rtx op1, rtx trueop0, rtx trueop1) | |
1660 | { | |
bd1ef757 | 1661 | rtx tem, reversed, opleft, opright; |
0a67e02c PB |
1662 | HOST_WIDE_INT val; |
1663 | unsigned int width = GET_MODE_BITSIZE (mode); | |
1664 | ||
1665 | /* Even if we can't compute a constant result, | |
1666 | there are some cases worth simplifying. */ | |
1667 | ||
1668 | switch (code) | |
852c8ba1 | 1669 | { |
0a67e02c PB |
1670 | case PLUS: |
1671 | /* Maybe simplify x + 0 to x. The two expressions are equivalent | |
1672 | when x is NaN, infinite, or finite and nonzero. They aren't | |
1673 | when x is -0 and the rounding mode is not towards -infinity, | |
1674 | since (-0) + 0 is then 0. */ | |
1675 | if (!HONOR_SIGNED_ZEROS (mode) && trueop1 == CONST0_RTX (mode)) | |
1676 | return op0; | |
1677 | ||
1678 | /* ((-a) + b) -> (b - a) and similarly for (a + (-b)). These | |
1679 | transformations are safe even for IEEE. */ | |
1680 | if (GET_CODE (op0) == NEG) | |
1681 | return simplify_gen_binary (MINUS, mode, op1, XEXP (op0, 0)); | |
1682 | else if (GET_CODE (op1) == NEG) | |
1683 | return simplify_gen_binary (MINUS, mode, op0, XEXP (op1, 0)); | |
1684 | ||
1685 | /* (~a) + 1 -> -a */ | |
1686 | if (INTEGRAL_MODE_P (mode) | |
1687 | && GET_CODE (op0) == NOT | |
1688 | && trueop1 == const1_rtx) | |
1689 | return simplify_gen_unary (NEG, mode, XEXP (op0, 0), mode); | |
1690 | ||
1691 | /* Handle both-operands-constant cases. We can only add | |
1692 | CONST_INTs to constants since the sum of relocatable symbols | |
1693 | can't be handled by most assemblers. Don't add CONST_INT | |
1694 | to CONST_INT since overflow won't be computed properly if wider | |
1695 | than HOST_BITS_PER_WIDE_INT. */ | |
1696 | ||
dd59ef13 RS |
1697 | if ((GET_CODE (op0) == CONST |
1698 | || GET_CODE (op0) == SYMBOL_REF | |
1699 | || GET_CODE (op0) == LABEL_REF) | |
481683e1 | 1700 | && CONST_INT_P (op1)) |
0a67e02c | 1701 | return plus_constant (op0, INTVAL (op1)); |
dd59ef13 RS |
1702 | else if ((GET_CODE (op1) == CONST |
1703 | || GET_CODE (op1) == SYMBOL_REF | |
1704 | || GET_CODE (op1) == LABEL_REF) | |
481683e1 | 1705 | && CONST_INT_P (op0)) |
0a67e02c PB |
1706 | return plus_constant (op1, INTVAL (op0)); |
1707 | ||
1708 | /* See if this is something like X * C - X or vice versa or | |
1709 | if the multiplication is written as a shift. If so, we can | |
1710 | distribute and make a new multiply, shift, or maybe just | |
1711 | have X (if C is 2 in the example above). But don't make | |
1712 | something more expensive than we had before. */ | |
1713 | ||
6800ea5c | 1714 | if (SCALAR_INT_MODE_P (mode)) |
0a67e02c | 1715 | { |
fab2f52c AO |
1716 | HOST_WIDE_INT coeff0h = 0, coeff1h = 0; |
1717 | unsigned HOST_WIDE_INT coeff0l = 1, coeff1l = 1; | |
0a67e02c PB |
1718 | rtx lhs = op0, rhs = op1; |
1719 | ||
1720 | if (GET_CODE (lhs) == NEG) | |
fab2f52c AO |
1721 | { |
1722 | coeff0l = -1; | |
1723 | coeff0h = -1; | |
1724 | lhs = XEXP (lhs, 0); | |
1725 | } | |
0a67e02c | 1726 | else if (GET_CODE (lhs) == MULT |
481683e1 | 1727 | && CONST_INT_P (XEXP (lhs, 1))) |
fab2f52c AO |
1728 | { |
1729 | coeff0l = INTVAL (XEXP (lhs, 1)); | |
1730 | coeff0h = INTVAL (XEXP (lhs, 1)) < 0 ? -1 : 0; | |
1731 | lhs = XEXP (lhs, 0); | |
1732 | } | |
0a67e02c | 1733 | else if (GET_CODE (lhs) == ASHIFT |
481683e1 | 1734 | && CONST_INT_P (XEXP (lhs, 1)) |
0a67e02c PB |
1735 | && INTVAL (XEXP (lhs, 1)) >= 0 |
1736 | && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT) | |
1737 | { | |
fab2f52c AO |
1738 | coeff0l = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1)); |
1739 | coeff0h = 0; | |
0a67e02c PB |
1740 | lhs = XEXP (lhs, 0); |
1741 | } | |
852c8ba1 | 1742 | |
0a67e02c | 1743 | if (GET_CODE (rhs) == NEG) |
fab2f52c AO |
1744 | { |
1745 | coeff1l = -1; | |
1746 | coeff1h = -1; | |
1747 | rhs = XEXP (rhs, 0); | |
1748 | } | |
0a67e02c | 1749 | else if (GET_CODE (rhs) == MULT |
481683e1 | 1750 | && CONST_INT_P (XEXP (rhs, 1))) |
0a67e02c | 1751 | { |
fab2f52c AO |
1752 | coeff1l = INTVAL (XEXP (rhs, 1)); |
1753 | coeff1h = INTVAL (XEXP (rhs, 1)) < 0 ? -1 : 0; | |
1754 | rhs = XEXP (rhs, 0); | |
0a67e02c PB |
1755 | } |
1756 | else if (GET_CODE (rhs) == ASHIFT | |
481683e1 | 1757 | && CONST_INT_P (XEXP (rhs, 1)) |
0a67e02c PB |
1758 | && INTVAL (XEXP (rhs, 1)) >= 0 |
1759 | && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT) | |
1760 | { | |
fab2f52c AO |
1761 | coeff1l = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1)); |
1762 | coeff1h = 0; | |
0a67e02c PB |
1763 | rhs = XEXP (rhs, 0); |
1764 | } | |
1765 | ||
1766 | if (rtx_equal_p (lhs, rhs)) | |
1767 | { | |
1768 | rtx orig = gen_rtx_PLUS (mode, op0, op1); | |
fab2f52c AO |
1769 | rtx coeff; |
1770 | unsigned HOST_WIDE_INT l; | |
1771 | HOST_WIDE_INT h; | |
f40751dd | 1772 | bool speed = optimize_function_for_speed_p (cfun); |
fab2f52c AO |
1773 | |
1774 | add_double (coeff0l, coeff0h, coeff1l, coeff1h, &l, &h); | |
1775 | coeff = immed_double_const (l, h, mode); | |
1776 | ||
1777 | tem = simplify_gen_binary (MULT, mode, lhs, coeff); | |
f40751dd | 1778 | return rtx_cost (tem, SET, speed) <= rtx_cost (orig, SET, speed) |
0a67e02c PB |
1779 | ? tem : 0; |
1780 | } | |
1781 | } | |
1782 | ||
1783 | /* (plus (xor X C1) C2) is (xor X (C1^C2)) if C2 is signbit. */ | |
481683e1 | 1784 | if ((CONST_INT_P (op1) |
0a67e02c PB |
1785 | || GET_CODE (op1) == CONST_DOUBLE) |
1786 | && GET_CODE (op0) == XOR | |
481683e1 | 1787 | && (CONST_INT_P (XEXP (op0, 1)) |
0a67e02c PB |
1788 | || GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE) |
1789 | && mode_signbit_p (mode, op1)) | |
1790 | return simplify_gen_binary (XOR, mode, XEXP (op0, 0), | |
1791 | simplify_gen_binary (XOR, mode, op1, | |
1792 | XEXP (op0, 1))); | |
1793 | ||
bd1ef757 | 1794 | /* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)). */ |
4bf371ea RG |
1795 | if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode) |
1796 | && GET_CODE (op0) == MULT | |
bd1ef757 PB |
1797 | && GET_CODE (XEXP (op0, 0)) == NEG) |
1798 | { | |
1799 | rtx in1, in2; | |
1800 | ||
1801 | in1 = XEXP (XEXP (op0, 0), 0); | |
1802 | in2 = XEXP (op0, 1); | |
1803 | return simplify_gen_binary (MINUS, mode, op1, | |
1804 | simplify_gen_binary (MULT, mode, | |
1805 | in1, in2)); | |
1806 | } | |
1807 | ||
1808 | /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if | |
1809 | C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE | |
1810 | is 1. */ | |
1811 | if (COMPARISON_P (op0) | |
1812 | && ((STORE_FLAG_VALUE == -1 && trueop1 == const1_rtx) | |
1813 | || (STORE_FLAG_VALUE == 1 && trueop1 == constm1_rtx)) | |
1814 | && (reversed = reversed_comparison (op0, mode))) | |
1815 | return | |
1816 | simplify_gen_unary (NEG, mode, reversed, mode); | |
1817 | ||
0a67e02c PB |
1818 | /* If one of the operands is a PLUS or a MINUS, see if we can |
1819 | simplify this by the associative law. | |
1820 | Don't use the associative law for floating point. | |
1821 | The inaccuracy makes it nonassociative, | |
1822 | and subtle programs can break if operations are associated. */ | |
1823 | ||
1824 | if (INTEGRAL_MODE_P (mode) | |
1825 | && (plus_minus_operand_p (op0) | |
1826 | || plus_minus_operand_p (op1)) | |
1941069a | 1827 | && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0) |
0a67e02c PB |
1828 | return tem; |
1829 | ||
1830 | /* Reassociate floating point addition only when the user | |
a1a82611 | 1831 | specifies associative math operations. */ |
0a67e02c | 1832 | if (FLOAT_MODE_P (mode) |
a1a82611 | 1833 | && flag_associative_math) |
852c8ba1 | 1834 | { |
0a67e02c PB |
1835 | tem = simplify_associative_operation (code, mode, op0, op1); |
1836 | if (tem) | |
1837 | return tem; | |
852c8ba1 | 1838 | } |
0a67e02c | 1839 | break; |
852c8ba1 | 1840 | |
0a67e02c | 1841 | case COMPARE: |
0a67e02c PB |
1842 | /* Convert (compare (gt (flags) 0) (lt (flags) 0)) to (flags). */ |
1843 | if (((GET_CODE (op0) == GT && GET_CODE (op1) == LT) | |
1844 | || (GET_CODE (op0) == GTU && GET_CODE (op1) == LTU)) | |
1845 | && XEXP (op0, 1) == const0_rtx && XEXP (op1, 1) == const0_rtx) | |
3198b947 | 1846 | { |
0a67e02c PB |
1847 | rtx xop00 = XEXP (op0, 0); |
1848 | rtx xop10 = XEXP (op1, 0); | |
3198b947 | 1849 | |
0a67e02c PB |
1850 | #ifdef HAVE_cc0 |
1851 | if (GET_CODE (xop00) == CC0 && GET_CODE (xop10) == CC0) | |
1852 | #else | |
1853 | if (REG_P (xop00) && REG_P (xop10) | |
1854 | && GET_MODE (xop00) == GET_MODE (xop10) | |
1855 | && REGNO (xop00) == REGNO (xop10) | |
1856 | && GET_MODE_CLASS (GET_MODE (xop00)) == MODE_CC | |
1857 | && GET_MODE_CLASS (GET_MODE (xop10)) == MODE_CC) | |
1858 | #endif | |
1859 | return xop00; | |
3198b947 | 1860 | } |
0a67e02c PB |
1861 | break; |
1862 | ||
1863 | case MINUS: | |
1864 | /* We can't assume x-x is 0 even with non-IEEE floating point, | |
1865 | but since it is zero except in very strange circumstances, we | |
81d2fb02 | 1866 | will treat it as zero with -ffinite-math-only. */ |
0a67e02c PB |
1867 | if (rtx_equal_p (trueop0, trueop1) |
1868 | && ! side_effects_p (op0) | |
81d2fb02 | 1869 | && (!FLOAT_MODE_P (mode) || !HONOR_NANS (mode))) |
0a67e02c PB |
1870 | return CONST0_RTX (mode); |
1871 | ||
1872 | /* Change subtraction from zero into negation. (0 - x) is the | |
1873 | same as -x when x is NaN, infinite, or finite and nonzero. | |
1874 | But if the mode has signed zeros, and does not round towards | |
1875 | -infinity, then 0 - 0 is 0, not -0. */ | |
1876 | if (!HONOR_SIGNED_ZEROS (mode) && trueop0 == CONST0_RTX (mode)) | |
1877 | return simplify_gen_unary (NEG, mode, op1, mode); | |
1878 | ||
1879 | /* (-1 - a) is ~a. */ | |
1880 | if (trueop0 == constm1_rtx) | |
1881 | return simplify_gen_unary (NOT, mode, op1, mode); | |
1882 | ||
1883 | /* Subtracting 0 has no effect unless the mode has signed zeros | |
1884 | and supports rounding towards -infinity. In such a case, | |
1885 | 0 - 0 is -0. */ | |
1886 | if (!(HONOR_SIGNED_ZEROS (mode) | |
1887 | && HONOR_SIGN_DEPENDENT_ROUNDING (mode)) | |
1888 | && trueop1 == CONST0_RTX (mode)) | |
1889 | return op0; | |
1890 | ||
1891 | /* See if this is something like X * C - X or vice versa or | |
1892 | if the multiplication is written as a shift. If so, we can | |
1893 | distribute and make a new multiply, shift, or maybe just | |
1894 | have X (if C is 2 in the example above). But don't make | |
1895 | something more expensive than we had before. */ | |
1896 | ||
6800ea5c | 1897 | if (SCALAR_INT_MODE_P (mode)) |
3198b947 | 1898 | { |
fab2f52c AO |
1899 | HOST_WIDE_INT coeff0h = 0, negcoeff1h = -1; |
1900 | unsigned HOST_WIDE_INT coeff0l = 1, negcoeff1l = -1; | |
0a67e02c | 1901 | rtx lhs = op0, rhs = op1; |
3198b947 | 1902 | |
0a67e02c | 1903 | if (GET_CODE (lhs) == NEG) |
fab2f52c AO |
1904 | { |
1905 | coeff0l = -1; | |
1906 | coeff0h = -1; | |
1907 | lhs = XEXP (lhs, 0); | |
1908 | } | |
0a67e02c | 1909 | else if (GET_CODE (lhs) == MULT |
481683e1 | 1910 | && CONST_INT_P (XEXP (lhs, 1))) |
0a67e02c | 1911 | { |
fab2f52c AO |
1912 | coeff0l = INTVAL (XEXP (lhs, 1)); |
1913 | coeff0h = INTVAL (XEXP (lhs, 1)) < 0 ? -1 : 0; | |
1914 | lhs = XEXP (lhs, 0); | |
0a67e02c PB |
1915 | } |
1916 | else if (GET_CODE (lhs) == ASHIFT | |
481683e1 | 1917 | && CONST_INT_P (XEXP (lhs, 1)) |
0a67e02c PB |
1918 | && INTVAL (XEXP (lhs, 1)) >= 0 |
1919 | && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT) | |
1920 | { | |
fab2f52c AO |
1921 | coeff0l = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1)); |
1922 | coeff0h = 0; | |
0a67e02c PB |
1923 | lhs = XEXP (lhs, 0); |
1924 | } | |
3198b947 | 1925 | |
0a67e02c | 1926 | if (GET_CODE (rhs) == NEG) |
fab2f52c AO |
1927 | { |
1928 | negcoeff1l = 1; | |
1929 | negcoeff1h = 0; | |
1930 | rhs = XEXP (rhs, 0); | |
1931 | } | |
0a67e02c | 1932 | else if (GET_CODE (rhs) == MULT |
481683e1 | 1933 | && CONST_INT_P (XEXP (rhs, 1))) |
0a67e02c | 1934 | { |
fab2f52c AO |
1935 | negcoeff1l = -INTVAL (XEXP (rhs, 1)); |
1936 | negcoeff1h = INTVAL (XEXP (rhs, 1)) <= 0 ? 0 : -1; | |
1937 | rhs = XEXP (rhs, 0); | |
0a67e02c PB |
1938 | } |
1939 | else if (GET_CODE (rhs) == ASHIFT | |
481683e1 | 1940 | && CONST_INT_P (XEXP (rhs, 1)) |
0a67e02c PB |
1941 | && INTVAL (XEXP (rhs, 1)) >= 0 |
1942 | && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT) | |
1943 | { | |
fab2f52c AO |
1944 | negcoeff1l = -(((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1))); |
1945 | negcoeff1h = -1; | |
0a67e02c PB |
1946 | rhs = XEXP (rhs, 0); |
1947 | } | |
1948 | ||
1949 | if (rtx_equal_p (lhs, rhs)) | |
1950 | { | |
1951 | rtx orig = gen_rtx_MINUS (mode, op0, op1); | |
fab2f52c AO |
1952 | rtx coeff; |
1953 | unsigned HOST_WIDE_INT l; | |
1954 | HOST_WIDE_INT h; | |
f40751dd | 1955 | bool speed = optimize_function_for_speed_p (cfun); |
fab2f52c AO |
1956 | |
1957 | add_double (coeff0l, coeff0h, negcoeff1l, negcoeff1h, &l, &h); | |
1958 | coeff = immed_double_const (l, h, mode); | |
1959 | ||
1960 | tem = simplify_gen_binary (MULT, mode, lhs, coeff); | |
f40751dd | 1961 | return rtx_cost (tem, SET, speed) <= rtx_cost (orig, SET, speed) |
0a67e02c PB |
1962 | ? tem : 0; |
1963 | } | |
3198b947 RH |
1964 | } |
1965 | ||
0a67e02c PB |
1966 | /* (a - (-b)) -> (a + b). True even for IEEE. */ |
1967 | if (GET_CODE (op1) == NEG) | |
1968 | return simplify_gen_binary (PLUS, mode, op0, XEXP (op1, 0)); | |
3198b947 | 1969 | |
0a67e02c PB |
1970 | /* (-x - c) may be simplified as (-c - x). */ |
1971 | if (GET_CODE (op0) == NEG | |
481683e1 | 1972 | && (CONST_INT_P (op1) |
0a67e02c | 1973 | || GET_CODE (op1) == CONST_DOUBLE)) |
79ae63b1 | 1974 | { |
0a67e02c PB |
1975 | tem = simplify_unary_operation (NEG, mode, op1, mode); |
1976 | if (tem) | |
1977 | return simplify_gen_binary (MINUS, mode, tem, XEXP (op0, 0)); | |
1978 | } | |
79ae63b1 | 1979 | |
0a67e02c | 1980 | /* Don't let a relocatable value get a negative coeff. */ |
481683e1 | 1981 | if (CONST_INT_P (op1) && GET_MODE (op0) != VOIDmode) |
0a67e02c PB |
1982 | return simplify_gen_binary (PLUS, mode, |
1983 | op0, | |
1984 | neg_const_int (mode, op1)); | |
1985 | ||
1986 | /* (x - (x & y)) -> (x & ~y) */ | |
1987 | if (GET_CODE (op1) == AND) | |
1988 | { | |
1989 | if (rtx_equal_p (op0, XEXP (op1, 0))) | |
79ae63b1 | 1990 | { |
0a67e02c PB |
1991 | tem = simplify_gen_unary (NOT, mode, XEXP (op1, 1), |
1992 | GET_MODE (XEXP (op1, 1))); | |
1993 | return simplify_gen_binary (AND, mode, op0, tem); | |
1994 | } | |
1995 | if (rtx_equal_p (op0, XEXP (op1, 1))) | |
1996 | { | |
1997 | tem = simplify_gen_unary (NOT, mode, XEXP (op1, 0), | |
1998 | GET_MODE (XEXP (op1, 0))); | |
1999 | return simplify_gen_binary (AND, mode, op0, tem); | |
79ae63b1 | 2000 | } |
79ae63b1 | 2001 | } |
1941069a | 2002 | |
bd1ef757 PB |
2003 | /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done |
2004 | by reversing the comparison code if valid. */ | |
2005 | if (STORE_FLAG_VALUE == 1 | |
2006 | && trueop0 == const1_rtx | |
2007 | && COMPARISON_P (op1) | |
2008 | && (reversed = reversed_comparison (op1, mode))) | |
2009 | return reversed; | |
2010 | ||
2011 | /* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A). */ | |
4bf371ea RG |
2012 | if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode) |
2013 | && GET_CODE (op1) == MULT | |
bd1ef757 PB |
2014 | && GET_CODE (XEXP (op1, 0)) == NEG) |
2015 | { | |
2016 | rtx in1, in2; | |
2017 | ||
2018 | in1 = XEXP (XEXP (op1, 0), 0); | |
2019 | in2 = XEXP (op1, 1); | |
2020 | return simplify_gen_binary (PLUS, mode, | |
2021 | simplify_gen_binary (MULT, mode, | |
2022 | in1, in2), | |
2023 | op0); | |
2024 | } | |
2025 | ||
2026 | /* Canonicalize (minus (neg A) (mult B C)) to | |
2027 | (minus (mult (neg B) C) A). */ | |
4bf371ea RG |
2028 | if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode) |
2029 | && GET_CODE (op1) == MULT | |
bd1ef757 PB |
2030 | && GET_CODE (op0) == NEG) |
2031 | { | |
2032 | rtx in1, in2; | |
2033 | ||
2034 | in1 = simplify_gen_unary (NEG, mode, XEXP (op1, 0), mode); | |
2035 | in2 = XEXP (op1, 1); | |
2036 | return simplify_gen_binary (MINUS, mode, | |
2037 | simplify_gen_binary (MULT, mode, | |
2038 | in1, in2), | |
2039 | XEXP (op0, 0)); | |
2040 | } | |
2041 | ||
1941069a PB |
2042 | /* If one of the operands is a PLUS or a MINUS, see if we can |
2043 | simplify this by the associative law. This will, for example, | |
2044 | canonicalize (minus A (plus B C)) to (minus (minus A B) C). | |
2045 | Don't use the associative law for floating point. | |
2046 | The inaccuracy makes it nonassociative, | |
2047 | and subtle programs can break if operations are associated. */ | |
2048 | ||
2049 | if (INTEGRAL_MODE_P (mode) | |
2050 | && (plus_minus_operand_p (op0) | |
2051 | || plus_minus_operand_p (op1)) | |
2052 | && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0) | |
2053 | return tem; | |
0a67e02c | 2054 | break; |
15e5ad76 | 2055 | |
0a67e02c PB |
2056 | case MULT: |
2057 | if (trueop1 == constm1_rtx) | |
2058 | return simplify_gen_unary (NEG, mode, op0, mode); | |
2059 | ||
2060 | /* Maybe simplify x * 0 to 0. The reduction is not valid if | |
2061 | x is NaN, since x * 0 is then also NaN. Nor is it valid | |
2062 | when the mode has signed zeros, since multiplying a negative | |
2063 | number by 0 will give -0, not 0. */ | |
2064 | if (!HONOR_NANS (mode) | |
2065 | && !HONOR_SIGNED_ZEROS (mode) | |
2066 | && trueop1 == CONST0_RTX (mode) | |
2067 | && ! side_effects_p (op0)) | |
2068 | return op1; | |
2069 | ||
2070 | /* In IEEE floating point, x*1 is not equivalent to x for | |
2071 | signalling NaNs. */ | |
2072 | if (!HONOR_SNANS (mode) | |
2073 | && trueop1 == CONST1_RTX (mode)) | |
2074 | return op0; | |
2075 | ||
2076 | /* Convert multiply by constant power of two into shift unless | |
2077 | we are still generating RTL. This test is a kludge. */ | |
481683e1 | 2078 | if (CONST_INT_P (trueop1) |
0a67e02c PB |
2079 | && (val = exact_log2 (INTVAL (trueop1))) >= 0 |
2080 | /* If the mode is larger than the host word size, and the | |
2081 | uppermost bit is set, then this isn't a power of two due | |
2082 | to implicit sign extension. */ | |
2083 | && (width <= HOST_BITS_PER_WIDE_INT | |
2084 | || val != HOST_BITS_PER_WIDE_INT - 1)) | |
2085 | return simplify_gen_binary (ASHIFT, mode, op0, GEN_INT (val)); | |
2086 | ||
fab2f52c | 2087 | /* Likewise for multipliers wider than a word. */ |
1753331b RS |
2088 | if (GET_CODE (trueop1) == CONST_DOUBLE |
2089 | && (GET_MODE (trueop1) == VOIDmode | |
2090 | || GET_MODE_CLASS (GET_MODE (trueop1)) == MODE_INT) | |
2091 | && GET_MODE (op0) == mode | |
2092 | && CONST_DOUBLE_LOW (trueop1) == 0 | |
2093 | && (val = exact_log2 (CONST_DOUBLE_HIGH (trueop1))) >= 0) | |
fab2f52c AO |
2094 | return simplify_gen_binary (ASHIFT, mode, op0, |
2095 | GEN_INT (val + HOST_BITS_PER_WIDE_INT)); | |
2096 | ||
0a67e02c PB |
2097 | /* x*2 is x+x and x*(-1) is -x */ |
2098 | if (GET_CODE (trueop1) == CONST_DOUBLE | |
3d8bf70f | 2099 | && SCALAR_FLOAT_MODE_P (GET_MODE (trueop1)) |
50cd60be | 2100 | && !DECIMAL_FLOAT_MODE_P (GET_MODE (trueop1)) |
0a67e02c PB |
2101 | && GET_MODE (op0) == mode) |
2102 | { | |
2103 | REAL_VALUE_TYPE d; | |
2104 | REAL_VALUE_FROM_CONST_DOUBLE (d, trueop1); | |
15e5ad76 | 2105 | |
0a67e02c PB |
2106 | if (REAL_VALUES_EQUAL (d, dconst2)) |
2107 | return simplify_gen_binary (PLUS, mode, op0, copy_rtx (op0)); | |
3e4093b6 | 2108 | |
1753331b RS |
2109 | if (!HONOR_SNANS (mode) |
2110 | && REAL_VALUES_EQUAL (d, dconstm1)) | |
0a67e02c PB |
2111 | return simplify_gen_unary (NEG, mode, op0, mode); |
2112 | } | |
15e5ad76 | 2113 | |
1753331b RS |
2114 | /* Optimize -x * -x as x * x. */ |
2115 | if (FLOAT_MODE_P (mode) | |
2116 | && GET_CODE (op0) == NEG | |
2117 | && GET_CODE (op1) == NEG | |
2118 | && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0)) | |
2119 | && !side_effects_p (XEXP (op0, 0))) | |
2120 | return simplify_gen_binary (MULT, mode, XEXP (op0, 0), XEXP (op1, 0)); | |
2121 | ||
2122 | /* Likewise, optimize abs(x) * abs(x) as x * x. */ | |
2123 | if (SCALAR_FLOAT_MODE_P (mode) | |
2124 | && GET_CODE (op0) == ABS | |
2125 | && GET_CODE (op1) == ABS | |
2126 | && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0)) | |
2127 | && !side_effects_p (XEXP (op0, 0))) | |
2128 | return simplify_gen_binary (MULT, mode, XEXP (op0, 0), XEXP (op1, 0)); | |
2129 | ||
0a67e02c PB |
2130 | /* Reassociate multiplication, but for floating point MULTs |
2131 | only when the user specifies unsafe math optimizations. */ | |
2132 | if (! FLOAT_MODE_P (mode) | |
2133 | || flag_unsafe_math_optimizations) | |
2134 | { | |
2135 | tem = simplify_associative_operation (code, mode, op0, op1); | |
2136 | if (tem) | |
2137 | return tem; | |
2138 | } | |
2139 | break; | |
6355b2d5 | 2140 | |
0a67e02c PB |
2141 | case IOR: |
2142 | if (trueop1 == const0_rtx) | |
2143 | return op0; | |
481683e1 | 2144 | if (CONST_INT_P (trueop1) |
0a67e02c PB |
2145 | && ((INTVAL (trueop1) & GET_MODE_MASK (mode)) |
2146 | == GET_MODE_MASK (mode))) | |
2147 | return op1; | |
2148 | if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)) | |
2149 | return op0; | |
2150 | /* A | (~A) -> -1 */ | |
2151 | if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1)) | |
2152 | || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0))) | |
2153 | && ! side_effects_p (op0) | |
3f2960d5 | 2154 | && SCALAR_INT_MODE_P (mode)) |
0a67e02c | 2155 | return constm1_rtx; |
bd1ef757 PB |
2156 | |
2157 | /* (ior A C) is C if all bits of A that might be nonzero are on in C. */ | |
481683e1 | 2158 | if (CONST_INT_P (op1) |
bd1ef757 PB |
2159 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
2160 | && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0) | |
2161 | return op1; | |
2162 | ||
49e7a9d4 RS |
2163 | /* Canonicalize (X & C1) | C2. */ |
2164 | if (GET_CODE (op0) == AND | |
481683e1 SZ |
2165 | && CONST_INT_P (trueop1) |
2166 | && CONST_INT_P (XEXP (op0, 1))) | |
49e7a9d4 RS |
2167 | { |
2168 | HOST_WIDE_INT mask = GET_MODE_MASK (mode); | |
2169 | HOST_WIDE_INT c1 = INTVAL (XEXP (op0, 1)); | |
2170 | HOST_WIDE_INT c2 = INTVAL (trueop1); | |
2171 | ||
2172 | /* If (C1&C2) == C1, then (X&C1)|C2 becomes X. */ | |
2173 | if ((c1 & c2) == c1 | |
2174 | && !side_effects_p (XEXP (op0, 0))) | |
2175 | return trueop1; | |
2176 | ||
2177 | /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */ | |
2178 | if (((c1|c2) & mask) == mask) | |
2179 | return simplify_gen_binary (IOR, mode, XEXP (op0, 0), op1); | |
2180 | ||
2181 | /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */ | |
2182 | if (((c1 & ~c2) & mask) != (c1 & mask)) | |
2183 | { | |
2184 | tem = simplify_gen_binary (AND, mode, XEXP (op0, 0), | |
2185 | gen_int_mode (c1 & ~c2, mode)); | |
2186 | return simplify_gen_binary (IOR, mode, tem, op1); | |
2187 | } | |
2188 | } | |
2189 | ||
bd1ef757 PB |
2190 | /* Convert (A & B) | A to A. */ |
2191 | if (GET_CODE (op0) == AND | |
2192 | && (rtx_equal_p (XEXP (op0, 0), op1) | |
2193 | || rtx_equal_p (XEXP (op0, 1), op1)) | |
2194 | && ! side_effects_p (XEXP (op0, 0)) | |
2195 | && ! side_effects_p (XEXP (op0, 1))) | |
2196 | return op1; | |
2197 | ||
2198 | /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the | |
2199 | mode size to (rotate A CX). */ | |
2200 | ||
2201 | if (GET_CODE (op1) == ASHIFT | |
2202 | || GET_CODE (op1) == SUBREG) | |
2203 | { | |
2204 | opleft = op1; | |
2205 | opright = op0; | |
2206 | } | |
2207 | else | |
2208 | { | |
2209 | opright = op1; | |
2210 | opleft = op0; | |
2211 | } | |
2212 | ||
2213 | if (GET_CODE (opleft) == ASHIFT && GET_CODE (opright) == LSHIFTRT | |
2214 | && rtx_equal_p (XEXP (opleft, 0), XEXP (opright, 0)) | |
481683e1 SZ |
2215 | && CONST_INT_P (XEXP (opleft, 1)) |
2216 | && CONST_INT_P (XEXP (opright, 1)) | |
bd1ef757 PB |
2217 | && (INTVAL (XEXP (opleft, 1)) + INTVAL (XEXP (opright, 1)) |
2218 | == GET_MODE_BITSIZE (mode))) | |
2219 | return gen_rtx_ROTATE (mode, XEXP (opright, 0), XEXP (opleft, 1)); | |
2220 | ||
2221 | /* Same, but for ashift that has been "simplified" to a wider mode | |
2222 | by simplify_shift_const. */ | |
2223 | ||
2224 | if (GET_CODE (opleft) == SUBREG | |
2225 | && GET_CODE (SUBREG_REG (opleft)) == ASHIFT | |
2226 | && GET_CODE (opright) == LSHIFTRT | |
2227 | && GET_CODE (XEXP (opright, 0)) == SUBREG | |
2228 | && GET_MODE (opleft) == GET_MODE (XEXP (opright, 0)) | |
2229 | && SUBREG_BYTE (opleft) == SUBREG_BYTE (XEXP (opright, 0)) | |
2230 | && (GET_MODE_SIZE (GET_MODE (opleft)) | |
2231 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (opleft)))) | |
2232 | && rtx_equal_p (XEXP (SUBREG_REG (opleft), 0), | |
2233 | SUBREG_REG (XEXP (opright, 0))) | |
481683e1 SZ |
2234 | && CONST_INT_P (XEXP (SUBREG_REG (opleft), 1)) |
2235 | && CONST_INT_P (XEXP (opright, 1)) | |
bd1ef757 PB |
2236 | && (INTVAL (XEXP (SUBREG_REG (opleft), 1)) + INTVAL (XEXP (opright, 1)) |
2237 | == GET_MODE_BITSIZE (mode))) | |
2238 | return gen_rtx_ROTATE (mode, XEXP (opright, 0), | |
01578564 | 2239 | XEXP (SUBREG_REG (opleft), 1)); |
bd1ef757 PB |
2240 | |
2241 | /* If we have (ior (and (X C1) C2)), simplify this by making | |
2242 | C1 as small as possible if C1 actually changes. */ | |
481683e1 | 2243 | if (CONST_INT_P (op1) |
bd1ef757 PB |
2244 | && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
2245 | || INTVAL (op1) > 0) | |
2246 | && GET_CODE (op0) == AND | |
481683e1 SZ |
2247 | && CONST_INT_P (XEXP (op0, 1)) |
2248 | && CONST_INT_P (op1) | |
bd1ef757 PB |
2249 | && (INTVAL (XEXP (op0, 1)) & INTVAL (op1)) != 0) |
2250 | return simplify_gen_binary (IOR, mode, | |
2251 | simplify_gen_binary | |
2252 | (AND, mode, XEXP (op0, 0), | |
2253 | GEN_INT (INTVAL (XEXP (op0, 1)) | |
2254 | & ~INTVAL (op1))), | |
2255 | op1); | |
2256 | ||
2257 | /* If OP0 is (ashiftrt (plus ...) C), it might actually be | |
2258 | a (sign_extend (plus ...)). Then check if OP1 is a CONST_INT and | |
2259 | the PLUS does not affect any of the bits in OP1: then we can do | |
2260 | the IOR as a PLUS and we can associate. This is valid if OP1 | |
2261 | can be safely shifted left C bits. */ | |
481683e1 | 2262 | if (CONST_INT_P (trueop1) && GET_CODE (op0) == ASHIFTRT |
bd1ef757 | 2263 | && GET_CODE (XEXP (op0, 0)) == PLUS |
481683e1 SZ |
2264 | && CONST_INT_P (XEXP (XEXP (op0, 0), 1)) |
2265 | && CONST_INT_P (XEXP (op0, 1)) | |
bd1ef757 PB |
2266 | && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT) |
2267 | { | |
2268 | int count = INTVAL (XEXP (op0, 1)); | |
2269 | HOST_WIDE_INT mask = INTVAL (trueop1) << count; | |
2270 | ||
2271 | if (mask >> count == INTVAL (trueop1) | |
2272 | && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0) | |
2273 | return simplify_gen_binary (ASHIFTRT, mode, | |
2274 | plus_constant (XEXP (op0, 0), mask), | |
2275 | XEXP (op0, 1)); | |
2276 | } | |
2277 | ||
0a67e02c PB |
2278 | tem = simplify_associative_operation (code, mode, op0, op1); |
2279 | if (tem) | |
2280 | return tem; | |
2281 | break; | |
2282 | ||
2283 | case XOR: | |
2284 | if (trueop1 == const0_rtx) | |
2285 | return op0; | |
481683e1 | 2286 | if (CONST_INT_P (trueop1) |
0a67e02c PB |
2287 | && ((INTVAL (trueop1) & GET_MODE_MASK (mode)) |
2288 | == GET_MODE_MASK (mode))) | |
2289 | return simplify_gen_unary (NOT, mode, op0, mode); | |
f5d1572a | 2290 | if (rtx_equal_p (trueop0, trueop1) |
0a67e02c PB |
2291 | && ! side_effects_p (op0) |
2292 | && GET_MODE_CLASS (mode) != MODE_CC) | |
6bd13540 | 2293 | return CONST0_RTX (mode); |
0a67e02c PB |
2294 | |
2295 | /* Canonicalize XOR of the most significant bit to PLUS. */ | |
481683e1 | 2296 | if ((CONST_INT_P (op1) |
0a67e02c PB |
2297 | || GET_CODE (op1) == CONST_DOUBLE) |
2298 | && mode_signbit_p (mode, op1)) | |
2299 | return simplify_gen_binary (PLUS, mode, op0, op1); | |
2300 | /* (xor (plus X C1) C2) is (xor X (C1^C2)) if C1 is signbit. */ | |
481683e1 | 2301 | if ((CONST_INT_P (op1) |
0a67e02c PB |
2302 | || GET_CODE (op1) == CONST_DOUBLE) |
2303 | && GET_CODE (op0) == PLUS | |
481683e1 | 2304 | && (CONST_INT_P (XEXP (op0, 1)) |
0a67e02c PB |
2305 | || GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE) |
2306 | && mode_signbit_p (mode, XEXP (op0, 1))) | |
2307 | return simplify_gen_binary (XOR, mode, XEXP (op0, 0), | |
2308 | simplify_gen_binary (XOR, mode, op1, | |
2309 | XEXP (op0, 1))); | |
bd1ef757 PB |
2310 | |
2311 | /* If we are XORing two things that have no bits in common, | |
2312 | convert them into an IOR. This helps to detect rotation encoded | |
2313 | using those methods and possibly other simplifications. */ | |
2314 | ||
2315 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
2316 | && (nonzero_bits (op0, mode) | |
2317 | & nonzero_bits (op1, mode)) == 0) | |
2318 | return (simplify_gen_binary (IOR, mode, op0, op1)); | |
2319 | ||
2320 | /* Convert (XOR (NOT x) (NOT y)) to (XOR x y). | |
2321 | Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for | |
2322 | (NOT y). */ | |
2323 | { | |
2324 | int num_negated = 0; | |
2325 | ||
2326 | if (GET_CODE (op0) == NOT) | |
2327 | num_negated++, op0 = XEXP (op0, 0); | |
2328 | if (GET_CODE (op1) == NOT) | |
2329 | num_negated++, op1 = XEXP (op1, 0); | |
2330 | ||
2331 | if (num_negated == 2) | |
2332 | return simplify_gen_binary (XOR, mode, op0, op1); | |
2333 | else if (num_negated == 1) | |
2334 | return simplify_gen_unary (NOT, mode, | |
2335 | simplify_gen_binary (XOR, mode, op0, op1), | |
2336 | mode); | |
2337 | } | |
2338 | ||
2339 | /* Convert (xor (and A B) B) to (and (not A) B). The latter may | |
2340 | correspond to a machine insn or result in further simplifications | |
2341 | if B is a constant. */ | |
2342 | ||
2343 | if (GET_CODE (op0) == AND | |
2344 | && rtx_equal_p (XEXP (op0, 1), op1) | |
2345 | && ! side_effects_p (op1)) | |
2346 | return simplify_gen_binary (AND, mode, | |
2347 | simplify_gen_unary (NOT, mode, | |
2348 | XEXP (op0, 0), mode), | |
2349 | op1); | |
2350 | ||
2351 | else if (GET_CODE (op0) == AND | |
2352 | && rtx_equal_p (XEXP (op0, 0), op1) | |
2353 | && ! side_effects_p (op1)) | |
2354 | return simplify_gen_binary (AND, mode, | |
2355 | simplify_gen_unary (NOT, mode, | |
2356 | XEXP (op0, 1), mode), | |
2357 | op1); | |
2358 | ||
2359 | /* (xor (comparison foo bar) (const_int 1)) can become the reversed | |
2360 | comparison if STORE_FLAG_VALUE is 1. */ | |
2361 | if (STORE_FLAG_VALUE == 1 | |
2362 | && trueop1 == const1_rtx | |
2363 | && COMPARISON_P (op0) | |
2364 | && (reversed = reversed_comparison (op0, mode))) | |
2365 | return reversed; | |
2366 | ||
2367 | /* (lshiftrt foo C) where C is the number of bits in FOO minus 1 | |
2368 | is (lt foo (const_int 0)), so we can perform the above | |
2369 | simplification if STORE_FLAG_VALUE is 1. */ | |
2370 | ||
2371 | if (STORE_FLAG_VALUE == 1 | |
2372 | && trueop1 == const1_rtx | |
2373 | && GET_CODE (op0) == LSHIFTRT | |
481683e1 | 2374 | && CONST_INT_P (XEXP (op0, 1)) |
bd1ef757 PB |
2375 | && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1) |
2376 | return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx); | |
2377 | ||
2378 | /* (xor (comparison foo bar) (const_int sign-bit)) | |
2379 | when STORE_FLAG_VALUE is the sign bit. */ | |
2380 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
2381 | && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode)) | |
2382 | == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1)) | |
2383 | && trueop1 == const_true_rtx | |
2384 | && COMPARISON_P (op0) | |
2385 | && (reversed = reversed_comparison (op0, mode))) | |
2386 | return reversed; | |
2387 | ||
0a67e02c PB |
2388 | tem = simplify_associative_operation (code, mode, op0, op1); |
2389 | if (tem) | |
2390 | return tem; | |
2391 | break; | |
2392 | ||
2393 | case AND: | |
3f2960d5 RH |
2394 | if (trueop1 == CONST0_RTX (mode) && ! side_effects_p (op0)) |
2395 | return trueop1; | |
f5a17c43 | 2396 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) |
dc5b3407 ZD |
2397 | { |
2398 | HOST_WIDE_INT nzop0 = nonzero_bits (trueop0, mode); | |
f5a17c43 | 2399 | HOST_WIDE_INT nzop1; |
481683e1 | 2400 | if (CONST_INT_P (trueop1)) |
f5a17c43 BS |
2401 | { |
2402 | HOST_WIDE_INT val1 = INTVAL (trueop1); | |
2403 | /* If we are turning off bits already known off in OP0, we need | |
2404 | not do an AND. */ | |
2405 | if ((nzop0 & ~val1) == 0) | |
2406 | return op0; | |
2407 | } | |
2408 | nzop1 = nonzero_bits (trueop1, mode); | |
dc5b3407 | 2409 | /* If we are clearing all the nonzero bits, the result is zero. */ |
f5a17c43 BS |
2410 | if ((nzop1 & nzop0) == 0 |
2411 | && !side_effects_p (op0) && !side_effects_p (op1)) | |
dc5b3407 ZD |
2412 | return CONST0_RTX (mode); |
2413 | } | |
f5d1572a | 2414 | if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0) |
0a67e02c PB |
2415 | && GET_MODE_CLASS (mode) != MODE_CC) |
2416 | return op0; | |
2417 | /* A & (~A) -> 0 */ | |
2418 | if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1)) | |
2419 | || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0))) | |
2420 | && ! side_effects_p (op0) | |
2421 | && GET_MODE_CLASS (mode) != MODE_CC) | |
3f2960d5 | 2422 | return CONST0_RTX (mode); |
0a67e02c PB |
2423 | |
2424 | /* Transform (and (extend X) C) into (zero_extend (and X C)) if | |
2425 | there are no nonzero bits of C outside of X's mode. */ | |
2426 | if ((GET_CODE (op0) == SIGN_EXTEND | |
2427 | || GET_CODE (op0) == ZERO_EXTEND) | |
481683e1 | 2428 | && CONST_INT_P (trueop1) |
0a67e02c PB |
2429 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
2430 | && (~GET_MODE_MASK (GET_MODE (XEXP (op0, 0))) | |
2431 | & INTVAL (trueop1)) == 0) | |
2432 | { | |
2433 | enum machine_mode imode = GET_MODE (XEXP (op0, 0)); | |
2434 | tem = simplify_gen_binary (AND, imode, XEXP (op0, 0), | |
2435 | gen_int_mode (INTVAL (trueop1), | |
2436 | imode)); | |
2437 | return simplify_gen_unary (ZERO_EXTEND, mode, tem, imode); | |
2438 | } | |
2439 | ||
fcaf7e12 AN |
2440 | /* Transform (and (truncate X) C) into (truncate (and X C)). This way |
2441 | we might be able to further simplify the AND with X and potentially | |
2442 | remove the truncation altogether. */ | |
2443 | if (GET_CODE (op0) == TRUNCATE && CONST_INT_P (trueop1)) | |
2444 | { | |
2445 | rtx x = XEXP (op0, 0); | |
2446 | enum machine_mode xmode = GET_MODE (x); | |
2447 | tem = simplify_gen_binary (AND, xmode, x, | |
2448 | gen_int_mode (INTVAL (trueop1), xmode)); | |
2449 | return simplify_gen_unary (TRUNCATE, mode, tem, xmode); | |
2450 | } | |
2451 | ||
49e7a9d4 RS |
2452 | /* Canonicalize (A | C1) & C2 as (A & C2) | (C1 & C2). */ |
2453 | if (GET_CODE (op0) == IOR | |
481683e1 SZ |
2454 | && CONST_INT_P (trueop1) |
2455 | && CONST_INT_P (XEXP (op0, 1))) | |
49e7a9d4 RS |
2456 | { |
2457 | HOST_WIDE_INT tmp = INTVAL (trueop1) & INTVAL (XEXP (op0, 1)); | |
2458 | return simplify_gen_binary (IOR, mode, | |
2459 | simplify_gen_binary (AND, mode, | |
2460 | XEXP (op0, 0), op1), | |
2461 | gen_int_mode (tmp, mode)); | |
2462 | } | |
2463 | ||
bd1ef757 PB |
2464 | /* Convert (A ^ B) & A to A & (~B) since the latter is often a single |
2465 | insn (and may simplify more). */ | |
2466 | if (GET_CODE (op0) == XOR | |
2467 | && rtx_equal_p (XEXP (op0, 0), op1) | |
2468 | && ! side_effects_p (op1)) | |
2469 | return simplify_gen_binary (AND, mode, | |
2470 | simplify_gen_unary (NOT, mode, | |
2471 | XEXP (op0, 1), mode), | |
2472 | op1); | |
2473 | ||
2474 | if (GET_CODE (op0) == XOR | |
2475 | && rtx_equal_p (XEXP (op0, 1), op1) | |
2476 | && ! side_effects_p (op1)) | |
2477 | return simplify_gen_binary (AND, mode, | |
2478 | simplify_gen_unary (NOT, mode, | |
2479 | XEXP (op0, 0), mode), | |
2480 | op1); | |
2481 | ||
2482 | /* Similarly for (~(A ^ B)) & A. */ | |
2483 | if (GET_CODE (op0) == NOT | |
2484 | && GET_CODE (XEXP (op0, 0)) == XOR | |
2485 | && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1) | |
2486 | && ! side_effects_p (op1)) | |
2487 | return simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1); | |
2488 | ||
2489 | if (GET_CODE (op0) == NOT | |
2490 | && GET_CODE (XEXP (op0, 0)) == XOR | |
2491 | && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1) | |
2492 | && ! side_effects_p (op1)) | |
2493 | return simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1); | |
2494 | ||
2495 | /* Convert (A | B) & A to A. */ | |
2496 | if (GET_CODE (op0) == IOR | |
2497 | && (rtx_equal_p (XEXP (op0, 0), op1) | |
2498 | || rtx_equal_p (XEXP (op0, 1), op1)) | |
2499 | && ! side_effects_p (XEXP (op0, 0)) | |
2500 | && ! side_effects_p (XEXP (op0, 1))) | |
2501 | return op1; | |
2502 | ||
0a67e02c PB |
2503 | /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M, |
2504 | ((A & N) + B) & M -> (A + B) & M | |
2505 | Similarly if (N & M) == 0, | |
2506 | ((A | N) + B) & M -> (A + B) & M | |
dc5b3407 ZD |
2507 | and for - instead of + and/or ^ instead of |. |
2508 | Also, if (N & M) == 0, then | |
2509 | (A +- N) & M -> A & M. */ | |
481683e1 | 2510 | if (CONST_INT_P (trueop1) |
0a67e02c PB |
2511 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
2512 | && ~INTVAL (trueop1) | |
2513 | && (INTVAL (trueop1) & (INTVAL (trueop1) + 1)) == 0 | |
2514 | && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS)) | |
2515 | { | |
2516 | rtx pmop[2]; | |
2517 | int which; | |
2518 | ||
2519 | pmop[0] = XEXP (op0, 0); | |
2520 | pmop[1] = XEXP (op0, 1); | |
2521 | ||
481683e1 | 2522 | if (CONST_INT_P (pmop[1]) |
dc5b3407 ZD |
2523 | && (INTVAL (pmop[1]) & INTVAL (trueop1)) == 0) |
2524 | return simplify_gen_binary (AND, mode, pmop[0], op1); | |
2525 | ||
0a67e02c PB |
2526 | for (which = 0; which < 2; which++) |
2527 | { | |
2528 | tem = pmop[which]; | |
2529 | switch (GET_CODE (tem)) | |
6355b2d5 | 2530 | { |
0a67e02c | 2531 | case AND: |
481683e1 | 2532 | if (CONST_INT_P (XEXP (tem, 1)) |
0a67e02c PB |
2533 | && (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1)) |
2534 | == INTVAL (trueop1)) | |
2535 | pmop[which] = XEXP (tem, 0); | |
6355b2d5 | 2536 | break; |
0a67e02c PB |
2537 | case IOR: |
2538 | case XOR: | |
481683e1 | 2539 | if (CONST_INT_P (XEXP (tem, 1)) |
0a67e02c PB |
2540 | && (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1)) == 0) |
2541 | pmop[which] = XEXP (tem, 0); | |
6355b2d5 | 2542 | break; |
6355b2d5 JJ |
2543 | default: |
2544 | break; | |
2545 | } | |
2546 | } | |
2547 | ||
0a67e02c PB |
2548 | if (pmop[0] != XEXP (op0, 0) || pmop[1] != XEXP (op0, 1)) |
2549 | { | |
2550 | tem = simplify_gen_binary (GET_CODE (op0), mode, | |
2551 | pmop[0], pmop[1]); | |
2552 | return simplify_gen_binary (code, mode, tem, op1); | |
2553 | } | |
2554 | } | |
f79db4f6 AP |
2555 | |
2556 | /* (and X (ior (not X) Y) -> (and X Y) */ | |
2557 | if (GET_CODE (op1) == IOR | |
2558 | && GET_CODE (XEXP (op1, 0)) == NOT | |
2559 | && op0 == XEXP (XEXP (op1, 0), 0)) | |
2560 | return simplify_gen_binary (AND, mode, op0, XEXP (op1, 1)); | |
2561 | ||
2562 | /* (and (ior (not X) Y) X) -> (and X Y) */ | |
2563 | if (GET_CODE (op0) == IOR | |
2564 | && GET_CODE (XEXP (op0, 0)) == NOT | |
2565 | && op1 == XEXP (XEXP (op0, 0), 0)) | |
2566 | return simplify_gen_binary (AND, mode, op1, XEXP (op0, 1)); | |
2567 | ||
0a67e02c PB |
2568 | tem = simplify_associative_operation (code, mode, op0, op1); |
2569 | if (tem) | |
2570 | return tem; | |
2571 | break; | |
762297d9 | 2572 | |
0a67e02c PB |
2573 | case UDIV: |
2574 | /* 0/x is 0 (or x&0 if x has side-effects). */ | |
3f2960d5 RH |
2575 | if (trueop0 == CONST0_RTX (mode)) |
2576 | { | |
2577 | if (side_effects_p (op1)) | |
2578 | return simplify_gen_binary (AND, mode, op1, trueop0); | |
2579 | return trueop0; | |
2580 | } | |
2581 | /* x/1 is x. */ | |
2582 | if (trueop1 == CONST1_RTX (mode)) | |
2583 | return rtl_hooks.gen_lowpart_no_emit (mode, op0); | |
2584 | /* Convert divide by power of two into shift. */ | |
481683e1 | 2585 | if (CONST_INT_P (trueop1) |
3f2960d5 RH |
2586 | && (val = exact_log2 (INTVAL (trueop1))) > 0) |
2587 | return simplify_gen_binary (LSHIFTRT, mode, op0, GEN_INT (val)); | |
2588 | break; | |
d284eb28 | 2589 | |
0a67e02c PB |
2590 | case DIV: |
2591 | /* Handle floating point and integers separately. */ | |
3d8bf70f | 2592 | if (SCALAR_FLOAT_MODE_P (mode)) |
0a67e02c PB |
2593 | { |
2594 | /* Maybe change 0.0 / x to 0.0. This transformation isn't | |
2595 | safe for modes with NaNs, since 0.0 / 0.0 will then be | |
2596 | NaN rather than 0.0. Nor is it safe for modes with signed | |
2597 | zeros, since dividing 0 by a negative number gives -0.0 */ | |
2598 | if (trueop0 == CONST0_RTX (mode) | |
2599 | && !HONOR_NANS (mode) | |
2600 | && !HONOR_SIGNED_ZEROS (mode) | |
2601 | && ! side_effects_p (op1)) | |
2602 | return op0; | |
2603 | /* x/1.0 is x. */ | |
2604 | if (trueop1 == CONST1_RTX (mode) | |
2605 | && !HONOR_SNANS (mode)) | |
2606 | return op0; | |
0cedb36c | 2607 | |
0a67e02c PB |
2608 | if (GET_CODE (trueop1) == CONST_DOUBLE |
2609 | && trueop1 != CONST0_RTX (mode)) | |
2610 | { | |
2611 | REAL_VALUE_TYPE d; | |
2612 | REAL_VALUE_FROM_CONST_DOUBLE (d, trueop1); | |
0cedb36c | 2613 | |
0a67e02c PB |
2614 | /* x/-1.0 is -x. */ |
2615 | if (REAL_VALUES_EQUAL (d, dconstm1) | |
2616 | && !HONOR_SNANS (mode)) | |
2617 | return simplify_gen_unary (NEG, mode, op0, mode); | |
0cedb36c | 2618 | |
0a67e02c | 2619 | /* Change FP division by a constant into multiplication. |
a1a82611 RE |
2620 | Only do this with -freciprocal-math. */ |
2621 | if (flag_reciprocal_math | |
0a67e02c PB |
2622 | && !REAL_VALUES_EQUAL (d, dconst0)) |
2623 | { | |
2624 | REAL_ARITHMETIC (d, RDIV_EXPR, dconst1, d); | |
2625 | tem = CONST_DOUBLE_FROM_REAL_VALUE (d, mode); | |
2626 | return simplify_gen_binary (MULT, mode, op0, tem); | |
2627 | } | |
2628 | } | |
2629 | } | |
0cedb36c | 2630 | else |
0cedb36c | 2631 | { |
0a67e02c | 2632 | /* 0/x is 0 (or x&0 if x has side-effects). */ |
3f2960d5 RH |
2633 | if (trueop0 == CONST0_RTX (mode)) |
2634 | { | |
2635 | if (side_effects_p (op1)) | |
2636 | return simplify_gen_binary (AND, mode, op1, trueop0); | |
2637 | return trueop0; | |
2638 | } | |
0a67e02c | 2639 | /* x/1 is x. */ |
3f2960d5 | 2640 | if (trueop1 == CONST1_RTX (mode)) |
9ce921ab | 2641 | return rtl_hooks.gen_lowpart_no_emit (mode, op0); |
0a67e02c PB |
2642 | /* x/-1 is -x. */ |
2643 | if (trueop1 == constm1_rtx) | |
2644 | { | |
9ce921ab | 2645 | rtx x = rtl_hooks.gen_lowpart_no_emit (mode, op0); |
0a67e02c PB |
2646 | return simplify_gen_unary (NEG, mode, x, mode); |
2647 | } | |
2648 | } | |
2649 | break; | |
0cedb36c | 2650 | |
0a67e02c PB |
2651 | case UMOD: |
2652 | /* 0%x is 0 (or x&0 if x has side-effects). */ | |
3f2960d5 RH |
2653 | if (trueop0 == CONST0_RTX (mode)) |
2654 | { | |
2655 | if (side_effects_p (op1)) | |
2656 | return simplify_gen_binary (AND, mode, op1, trueop0); | |
2657 | return trueop0; | |
2658 | } | |
2659 | /* x%1 is 0 (of x&0 if x has side-effects). */ | |
2660 | if (trueop1 == CONST1_RTX (mode)) | |
2661 | { | |
2662 | if (side_effects_p (op0)) | |
2663 | return simplify_gen_binary (AND, mode, op0, CONST0_RTX (mode)); | |
2664 | return CONST0_RTX (mode); | |
2665 | } | |
2666 | /* Implement modulus by power of two as AND. */ | |
481683e1 | 2667 | if (CONST_INT_P (trueop1) |
3f2960d5 RH |
2668 | && exact_log2 (INTVAL (trueop1)) > 0) |
2669 | return simplify_gen_binary (AND, mode, op0, | |
2670 | GEN_INT (INTVAL (op1) - 1)); | |
2671 | break; | |
0cedb36c | 2672 | |
0a67e02c PB |
2673 | case MOD: |
2674 | /* 0%x is 0 (or x&0 if x has side-effects). */ | |
3f2960d5 RH |
2675 | if (trueop0 == CONST0_RTX (mode)) |
2676 | { | |
2677 | if (side_effects_p (op1)) | |
2678 | return simplify_gen_binary (AND, mode, op1, trueop0); | |
2679 | return trueop0; | |
2680 | } | |
2681 | /* x%1 and x%-1 is 0 (or x&0 if x has side-effects). */ | |
2682 | if (trueop1 == CONST1_RTX (mode) || trueop1 == constm1_rtx) | |
2683 | { | |
2684 | if (side_effects_p (op0)) | |
2685 | return simplify_gen_binary (AND, mode, op0, CONST0_RTX (mode)); | |
2686 | return CONST0_RTX (mode); | |
2687 | } | |
2688 | break; | |
0cedb36c | 2689 | |
0a67e02c PB |
2690 | case ROTATERT: |
2691 | case ROTATE: | |
2692 | case ASHIFTRT: | |
70233f37 RS |
2693 | if (trueop1 == CONST0_RTX (mode)) |
2694 | return op0; | |
2695 | if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1)) | |
2696 | return op0; | |
0a67e02c | 2697 | /* Rotating ~0 always results in ~0. */ |
481683e1 | 2698 | if (CONST_INT_P (trueop0) && width <= HOST_BITS_PER_WIDE_INT |
0a67e02c PB |
2699 | && (unsigned HOST_WIDE_INT) INTVAL (trueop0) == GET_MODE_MASK (mode) |
2700 | && ! side_effects_p (op1)) | |
2701 | return op0; | |
cbc9503d | 2702 | canonicalize_shift: |
481683e1 | 2703 | if (SHIFT_COUNT_TRUNCATED && CONST_INT_P (op1)) |
cbc9503d RS |
2704 | { |
2705 | val = INTVAL (op1) & (GET_MODE_BITSIZE (mode) - 1); | |
2706 | if (val != INTVAL (op1)) | |
2707 | return simplify_gen_binary (code, mode, op0, GEN_INT (val)); | |
2708 | } | |
70233f37 | 2709 | break; |
9d317251 | 2710 | |
0a67e02c | 2711 | case ASHIFT: |
e551ad26 | 2712 | case SS_ASHIFT: |
14c931f1 | 2713 | case US_ASHIFT: |
70233f37 RS |
2714 | if (trueop1 == CONST0_RTX (mode)) |
2715 | return op0; | |
2716 | if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1)) | |
2717 | return op0; | |
cbc9503d | 2718 | goto canonicalize_shift; |
70233f37 | 2719 | |
0a67e02c | 2720 | case LSHIFTRT: |
3f2960d5 | 2721 | if (trueop1 == CONST0_RTX (mode)) |
0a67e02c | 2722 | return op0; |
3f2960d5 | 2723 | if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1)) |
0a67e02c | 2724 | return op0; |
70233f37 RS |
2725 | /* Optimize (lshiftrt (clz X) C) as (eq X 0). */ |
2726 | if (GET_CODE (op0) == CLZ | |
481683e1 | 2727 | && CONST_INT_P (trueop1) |
70233f37 | 2728 | && STORE_FLAG_VALUE == 1 |
e40122f0 | 2729 | && INTVAL (trueop1) < (HOST_WIDE_INT)width) |
70233f37 RS |
2730 | { |
2731 | enum machine_mode imode = GET_MODE (XEXP (op0, 0)); | |
2732 | unsigned HOST_WIDE_INT zero_val = 0; | |
2733 | ||
2734 | if (CLZ_DEFINED_VALUE_AT_ZERO (imode, zero_val) | |
2735 | && zero_val == GET_MODE_BITSIZE (imode) | |
2736 | && INTVAL (trueop1) == exact_log2 (zero_val)) | |
2737 | return simplify_gen_relational (EQ, mode, imode, | |
2738 | XEXP (op0, 0), const0_rtx); | |
2739 | } | |
cbc9503d | 2740 | goto canonicalize_shift; |
9d317251 | 2741 | |
0a67e02c PB |
2742 | case SMIN: |
2743 | if (width <= HOST_BITS_PER_WIDE_INT | |
481683e1 | 2744 | && CONST_INT_P (trueop1) |
0a67e02c PB |
2745 | && INTVAL (trueop1) == (HOST_WIDE_INT) 1 << (width -1) |
2746 | && ! side_effects_p (op0)) | |
2747 | return op1; | |
2748 | if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)) | |
2749 | return op0; | |
2750 | tem = simplify_associative_operation (code, mode, op0, op1); | |
2751 | if (tem) | |
2752 | return tem; | |
2753 | break; | |
0cedb36c | 2754 | |
0a67e02c PB |
2755 | case SMAX: |
2756 | if (width <= HOST_BITS_PER_WIDE_INT | |
481683e1 | 2757 | && CONST_INT_P (trueop1) |
0a67e02c PB |
2758 | && ((unsigned HOST_WIDE_INT) INTVAL (trueop1) |
2759 | == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1) | |
2760 | && ! side_effects_p (op0)) | |
2761 | return op1; | |
2762 | if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)) | |
2763 | return op0; | |
2764 | tem = simplify_associative_operation (code, mode, op0, op1); | |
2765 | if (tem) | |
2766 | return tem; | |
2767 | break; | |
0cedb36c | 2768 | |
0a67e02c | 2769 | case UMIN: |
3f2960d5 | 2770 | if (trueop1 == CONST0_RTX (mode) && ! side_effects_p (op0)) |
0a67e02c PB |
2771 | return op1; |
2772 | if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)) | |
2773 | return op0; | |
2774 | tem = simplify_associative_operation (code, mode, op0, op1); | |
2775 | if (tem) | |
2776 | return tem; | |
2777 | break; | |
0cedb36c | 2778 | |
0a67e02c PB |
2779 | case UMAX: |
2780 | if (trueop1 == constm1_rtx && ! side_effects_p (op0)) | |
2781 | return op1; | |
2782 | if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)) | |
2783 | return op0; | |
2784 | tem = simplify_associative_operation (code, mode, op0, op1); | |
2785 | if (tem) | |
2786 | return tem; | |
2787 | break; | |
0cedb36c | 2788 | |
0a67e02c PB |
2789 | case SS_PLUS: |
2790 | case US_PLUS: | |
2791 | case SS_MINUS: | |
2792 | case US_MINUS: | |
14c931f1 CF |
2793 | case SS_MULT: |
2794 | case US_MULT: | |
2795 | case SS_DIV: | |
2796 | case US_DIV: | |
0a67e02c PB |
2797 | /* ??? There are simplifications that can be done. */ |
2798 | return 0; | |
0cedb36c | 2799 | |
0a67e02c PB |
2800 | case VEC_SELECT: |
2801 | if (!VECTOR_MODE_P (mode)) | |
2802 | { | |
2803 | gcc_assert (VECTOR_MODE_P (GET_MODE (trueop0))); | |
2804 | gcc_assert (mode == GET_MODE_INNER (GET_MODE (trueop0))); | |
2805 | gcc_assert (GET_CODE (trueop1) == PARALLEL); | |
2806 | gcc_assert (XVECLEN (trueop1, 0) == 1); | |
481683e1 | 2807 | gcc_assert (CONST_INT_P (XVECEXP (trueop1, 0, 0))); |
0a67e02c PB |
2808 | |
2809 | if (GET_CODE (trueop0) == CONST_VECTOR) | |
2810 | return CONST_VECTOR_ELT (trueop0, INTVAL (XVECEXP | |
2811 | (trueop1, 0, 0))); | |
7f97f938 UB |
2812 | |
2813 | /* Extract a scalar element from a nested VEC_SELECT expression | |
2814 | (with optional nested VEC_CONCAT expression). Some targets | |
2815 | (i386) extract scalar element from a vector using chain of | |
2816 | nested VEC_SELECT expressions. When input operand is a memory | |
2817 | operand, this operation can be simplified to a simple scalar | |
2818 | load from an offseted memory address. */ | |
2819 | if (GET_CODE (trueop0) == VEC_SELECT) | |
2820 | { | |
2821 | rtx op0 = XEXP (trueop0, 0); | |
2822 | rtx op1 = XEXP (trueop0, 1); | |
2823 | ||
2824 | enum machine_mode opmode = GET_MODE (op0); | |
2825 | int elt_size = GET_MODE_SIZE (GET_MODE_INNER (opmode)); | |
2826 | int n_elts = GET_MODE_SIZE (opmode) / elt_size; | |
2827 | ||
2828 | int i = INTVAL (XVECEXP (trueop1, 0, 0)); | |
2829 | int elem; | |
2830 | ||
2831 | rtvec vec; | |
2832 | rtx tmp_op, tmp; | |
2833 | ||
2834 | gcc_assert (GET_CODE (op1) == PARALLEL); | |
2835 | gcc_assert (i < n_elts); | |
2836 | ||
2837 | /* Select element, pointed by nested selector. */ | |
3743c639 | 2838 | elem = INTVAL (XVECEXP (op1, 0, i)); |
7f97f938 UB |
2839 | |
2840 | /* Handle the case when nested VEC_SELECT wraps VEC_CONCAT. */ | |
2841 | if (GET_CODE (op0) == VEC_CONCAT) | |
2842 | { | |
2843 | rtx op00 = XEXP (op0, 0); | |
2844 | rtx op01 = XEXP (op0, 1); | |
2845 | ||
2846 | enum machine_mode mode00, mode01; | |
2847 | int n_elts00, n_elts01; | |
2848 | ||
2849 | mode00 = GET_MODE (op00); | |
2850 | mode01 = GET_MODE (op01); | |
2851 | ||
2852 | /* Find out number of elements of each operand. */ | |
2853 | if (VECTOR_MODE_P (mode00)) | |
2854 | { | |
2855 | elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode00)); | |
2856 | n_elts00 = GET_MODE_SIZE (mode00) / elt_size; | |
2857 | } | |
2858 | else | |
2859 | n_elts00 = 1; | |
2860 | ||
2861 | if (VECTOR_MODE_P (mode01)) | |
2862 | { | |
2863 | elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode01)); | |
2864 | n_elts01 = GET_MODE_SIZE (mode01) / elt_size; | |
2865 | } | |
2866 | else | |
2867 | n_elts01 = 1; | |
2868 | ||
2869 | gcc_assert (n_elts == n_elts00 + n_elts01); | |
2870 | ||
2871 | /* Select correct operand of VEC_CONCAT | |
2872 | and adjust selector. */ | |
2873 | if (elem < n_elts01) | |
2874 | tmp_op = op00; | |
2875 | else | |
2876 | { | |
2877 | tmp_op = op01; | |
2878 | elem -= n_elts00; | |
2879 | } | |
2880 | } | |
2881 | else | |
2882 | tmp_op = op0; | |
2883 | ||
2884 | vec = rtvec_alloc (1); | |
2885 | RTVEC_ELT (vec, 0) = GEN_INT (elem); | |
2886 | ||
2887 | tmp = gen_rtx_fmt_ee (code, mode, | |
2888 | tmp_op, gen_rtx_PARALLEL (VOIDmode, vec)); | |
2889 | return tmp; | |
2890 | } | |
0a67e02c PB |
2891 | } |
2892 | else | |
2893 | { | |
2894 | gcc_assert (VECTOR_MODE_P (GET_MODE (trueop0))); | |
2895 | gcc_assert (GET_MODE_INNER (mode) | |
2896 | == GET_MODE_INNER (GET_MODE (trueop0))); | |
2897 | gcc_assert (GET_CODE (trueop1) == PARALLEL); | |
0cedb36c | 2898 | |
0a67e02c PB |
2899 | if (GET_CODE (trueop0) == CONST_VECTOR) |
2900 | { | |
2901 | int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode)); | |
2902 | unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size); | |
2903 | rtvec v = rtvec_alloc (n_elts); | |
2904 | unsigned int i; | |
0cedb36c | 2905 | |
0a67e02c PB |
2906 | gcc_assert (XVECLEN (trueop1, 0) == (int) n_elts); |
2907 | for (i = 0; i < n_elts; i++) | |
2908 | { | |
2909 | rtx x = XVECEXP (trueop1, 0, i); | |
0cedb36c | 2910 | |
481683e1 | 2911 | gcc_assert (CONST_INT_P (x)); |
0a67e02c PB |
2912 | RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0, |
2913 | INTVAL (x)); | |
0cedb36c JL |
2914 | } |
2915 | ||
0a67e02c | 2916 | return gen_rtx_CONST_VECTOR (mode, v); |
dd61aa98 | 2917 | } |
0a67e02c | 2918 | } |
bd1ef757 PB |
2919 | |
2920 | if (XVECLEN (trueop1, 0) == 1 | |
481683e1 | 2921 | && CONST_INT_P (XVECEXP (trueop1, 0, 0)) |
bd1ef757 PB |
2922 | && GET_CODE (trueop0) == VEC_CONCAT) |
2923 | { | |
2924 | rtx vec = trueop0; | |
2925 | int offset = INTVAL (XVECEXP (trueop1, 0, 0)) * GET_MODE_SIZE (mode); | |
2926 | ||
2927 | /* Try to find the element in the VEC_CONCAT. */ | |
2928 | while (GET_MODE (vec) != mode | |
2929 | && GET_CODE (vec) == VEC_CONCAT) | |
2930 | { | |
2931 | HOST_WIDE_INT vec_size = GET_MODE_SIZE (GET_MODE (XEXP (vec, 0))); | |
2932 | if (offset < vec_size) | |
2933 | vec = XEXP (vec, 0); | |
2934 | else | |
2935 | { | |
2936 | offset -= vec_size; | |
2937 | vec = XEXP (vec, 1); | |
2938 | } | |
2939 | vec = avoid_constant_pool_reference (vec); | |
2940 | } | |
2941 | ||
2942 | if (GET_MODE (vec) == mode) | |
2943 | return vec; | |
2944 | } | |
2945 | ||
0a67e02c PB |
2946 | return 0; |
2947 | case VEC_CONCAT: | |
2948 | { | |
2949 | enum machine_mode op0_mode = (GET_MODE (trueop0) != VOIDmode | |
2950 | ? GET_MODE (trueop0) | |
2951 | : GET_MODE_INNER (mode)); | |
2952 | enum machine_mode op1_mode = (GET_MODE (trueop1) != VOIDmode | |
2953 | ? GET_MODE (trueop1) | |
2954 | : GET_MODE_INNER (mode)); | |
2955 | ||
2956 | gcc_assert (VECTOR_MODE_P (mode)); | |
2957 | gcc_assert (GET_MODE_SIZE (op0_mode) + GET_MODE_SIZE (op1_mode) | |
2958 | == GET_MODE_SIZE (mode)); | |
2959 | ||
2960 | if (VECTOR_MODE_P (op0_mode)) | |
2961 | gcc_assert (GET_MODE_INNER (mode) | |
2962 | == GET_MODE_INNER (op0_mode)); | |
2963 | else | |
2964 | gcc_assert (GET_MODE_INNER (mode) == op0_mode); | |
0cedb36c | 2965 | |
0a67e02c PB |
2966 | if (VECTOR_MODE_P (op1_mode)) |
2967 | gcc_assert (GET_MODE_INNER (mode) | |
2968 | == GET_MODE_INNER (op1_mode)); | |
2969 | else | |
2970 | gcc_assert (GET_MODE_INNER (mode) == op1_mode); | |
2971 | ||
2972 | if ((GET_CODE (trueop0) == CONST_VECTOR | |
481683e1 | 2973 | || CONST_INT_P (trueop0) |
0a67e02c PB |
2974 | || GET_CODE (trueop0) == CONST_DOUBLE) |
2975 | && (GET_CODE (trueop1) == CONST_VECTOR | |
481683e1 | 2976 | || CONST_INT_P (trueop1) |
0a67e02c PB |
2977 | || GET_CODE (trueop1) == CONST_DOUBLE)) |
2978 | { | |
2979 | int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode)); | |
2980 | unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size); | |
2981 | rtvec v = rtvec_alloc (n_elts); | |
2982 | unsigned int i; | |
2983 | unsigned in_n_elts = 1; | |
c877353c | 2984 | |
0a67e02c PB |
2985 | if (VECTOR_MODE_P (op0_mode)) |
2986 | in_n_elts = (GET_MODE_SIZE (op0_mode) / elt_size); | |
2987 | for (i = 0; i < n_elts; i++) | |
2988 | { | |
2989 | if (i < in_n_elts) | |
2990 | { | |
2991 | if (!VECTOR_MODE_P (op0_mode)) | |
2992 | RTVEC_ELT (v, i) = trueop0; | |
2993 | else | |
2994 | RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0, i); | |
2995 | } | |
2996 | else | |
2997 | { | |
2998 | if (!VECTOR_MODE_P (op1_mode)) | |
2999 | RTVEC_ELT (v, i) = trueop1; | |
3000 | else | |
3001 | RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop1, | |
3002 | i - in_n_elts); | |
3003 | } | |
3004 | } | |
0cedb36c | 3005 | |
0a67e02c PB |
3006 | return gen_rtx_CONST_VECTOR (mode, v); |
3007 | } | |
3008 | } | |
3009 | return 0; | |
0cedb36c | 3010 | |
0a67e02c PB |
3011 | default: |
3012 | gcc_unreachable (); | |
3013 | } | |
0cedb36c | 3014 | |
0a67e02c PB |
3015 | return 0; |
3016 | } | |
0cedb36c | 3017 | |
0a67e02c PB |
3018 | rtx |
3019 | simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode, | |
3020 | rtx op0, rtx op1) | |
3021 | { | |
3022 | HOST_WIDE_INT arg0, arg1, arg0s, arg1s; | |
3023 | HOST_WIDE_INT val; | |
3024 | unsigned int width = GET_MODE_BITSIZE (mode); | |
0cedb36c | 3025 | |
0a67e02c PB |
3026 | if (VECTOR_MODE_P (mode) |
3027 | && code != VEC_CONCAT | |
3028 | && GET_CODE (op0) == CONST_VECTOR | |
3029 | && GET_CODE (op1) == CONST_VECTOR) | |
3030 | { | |
3031 | unsigned n_elts = GET_MODE_NUNITS (mode); | |
3032 | enum machine_mode op0mode = GET_MODE (op0); | |
3033 | unsigned op0_n_elts = GET_MODE_NUNITS (op0mode); | |
3034 | enum machine_mode op1mode = GET_MODE (op1); | |
3035 | unsigned op1_n_elts = GET_MODE_NUNITS (op1mode); | |
3036 | rtvec v = rtvec_alloc (n_elts); | |
3037 | unsigned int i; | |
0cedb36c | 3038 | |
0a67e02c PB |
3039 | gcc_assert (op0_n_elts == n_elts); |
3040 | gcc_assert (op1_n_elts == n_elts); | |
3041 | for (i = 0; i < n_elts; i++) | |
3042 | { | |
3043 | rtx x = simplify_binary_operation (code, GET_MODE_INNER (mode), | |
3044 | CONST_VECTOR_ELT (op0, i), | |
3045 | CONST_VECTOR_ELT (op1, i)); | |
3046 | if (!x) | |
3047 | return 0; | |
3048 | RTVEC_ELT (v, i) = x; | |
3049 | } | |
0cedb36c | 3050 | |
0a67e02c PB |
3051 | return gen_rtx_CONST_VECTOR (mode, v); |
3052 | } | |
0cedb36c | 3053 | |
0a67e02c PB |
3054 | if (VECTOR_MODE_P (mode) |
3055 | && code == VEC_CONCAT | |
d1f0728e UB |
3056 | && (CONST_INT_P (op0) |
3057 | || GET_CODE (op0) == CONST_DOUBLE | |
3058 | || GET_CODE (op0) == CONST_FIXED) | |
3059 | && (CONST_INT_P (op1) | |
3060 | || GET_CODE (op1) == CONST_DOUBLE | |
3061 | || GET_CODE (op1) == CONST_FIXED)) | |
0a67e02c PB |
3062 | { |
3063 | unsigned n_elts = GET_MODE_NUNITS (mode); | |
3064 | rtvec v = rtvec_alloc (n_elts); | |
0cedb36c | 3065 | |
0a67e02c PB |
3066 | gcc_assert (n_elts >= 2); |
3067 | if (n_elts == 2) | |
3068 | { | |
3069 | gcc_assert (GET_CODE (op0) != CONST_VECTOR); | |
3070 | gcc_assert (GET_CODE (op1) != CONST_VECTOR); | |
dd61aa98 | 3071 | |
0a67e02c PB |
3072 | RTVEC_ELT (v, 0) = op0; |
3073 | RTVEC_ELT (v, 1) = op1; | |
3074 | } | |
3075 | else | |
3076 | { | |
3077 | unsigned op0_n_elts = GET_MODE_NUNITS (GET_MODE (op0)); | |
3078 | unsigned op1_n_elts = GET_MODE_NUNITS (GET_MODE (op1)); | |
3079 | unsigned i; | |
0cedb36c | 3080 | |
0a67e02c PB |
3081 | gcc_assert (GET_CODE (op0) == CONST_VECTOR); |
3082 | gcc_assert (GET_CODE (op1) == CONST_VECTOR); | |
3083 | gcc_assert (op0_n_elts + op1_n_elts == n_elts); | |
0cedb36c | 3084 | |
0a67e02c PB |
3085 | for (i = 0; i < op0_n_elts; ++i) |
3086 | RTVEC_ELT (v, i) = XVECEXP (op0, 0, i); | |
3087 | for (i = 0; i < op1_n_elts; ++i) | |
3088 | RTVEC_ELT (v, op0_n_elts+i) = XVECEXP (op1, 0, i); | |
3089 | } | |
0b24db88 | 3090 | |
0a67e02c PB |
3091 | return gen_rtx_CONST_VECTOR (mode, v); |
3092 | } | |
0cedb36c | 3093 | |
3d8bf70f | 3094 | if (SCALAR_FLOAT_MODE_P (mode) |
0a67e02c PB |
3095 | && GET_CODE (op0) == CONST_DOUBLE |
3096 | && GET_CODE (op1) == CONST_DOUBLE | |
3097 | && mode == GET_MODE (op0) && mode == GET_MODE (op1)) | |
3098 | { | |
3099 | if (code == AND | |
3100 | || code == IOR | |
3101 | || code == XOR) | |
3102 | { | |
3103 | long tmp0[4]; | |
3104 | long tmp1[4]; | |
3105 | REAL_VALUE_TYPE r; | |
3106 | int i; | |
a0ee8b5f | 3107 | |
0a67e02c PB |
3108 | real_to_target (tmp0, CONST_DOUBLE_REAL_VALUE (op0), |
3109 | GET_MODE (op0)); | |
3110 | real_to_target (tmp1, CONST_DOUBLE_REAL_VALUE (op1), | |
3111 | GET_MODE (op1)); | |
3112 | for (i = 0; i < 4; i++) | |
a0ee8b5f | 3113 | { |
0a67e02c PB |
3114 | switch (code) |
3115 | { | |
3116 | case AND: | |
3117 | tmp0[i] &= tmp1[i]; | |
3118 | break; | |
3119 | case IOR: | |
3120 | tmp0[i] |= tmp1[i]; | |
3121 | break; | |
3122 | case XOR: | |
3123 | tmp0[i] ^= tmp1[i]; | |
3124 | break; | |
3125 | default: | |
3126 | gcc_unreachable (); | |
3127 | } | |
a0ee8b5f | 3128 | } |
0a67e02c PB |
3129 | real_from_target (&r, tmp0, mode); |
3130 | return CONST_DOUBLE_FROM_REAL_VALUE (r, mode); | |
3131 | } | |
3132 | else | |
3133 | { | |
3134 | REAL_VALUE_TYPE f0, f1, value, result; | |
3135 | bool inexact; | |
a0ee8b5f | 3136 | |
0a67e02c PB |
3137 | REAL_VALUE_FROM_CONST_DOUBLE (f0, op0); |
3138 | REAL_VALUE_FROM_CONST_DOUBLE (f1, op1); | |
3139 | real_convert (&f0, mode, &f0); | |
3140 | real_convert (&f1, mode, &f1); | |
df62f18a | 3141 | |
0a67e02c PB |
3142 | if (HONOR_SNANS (mode) |
3143 | && (REAL_VALUE_ISNAN (f0) || REAL_VALUE_ISNAN (f1))) | |
3144 | return 0; | |
0cedb36c | 3145 | |
0a67e02c PB |
3146 | if (code == DIV |
3147 | && REAL_VALUES_EQUAL (f1, dconst0) | |
3148 | && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode))) | |
3149 | return 0; | |
0cedb36c | 3150 | |
0a67e02c PB |
3151 | if (MODE_HAS_INFINITIES (mode) && HONOR_NANS (mode) |
3152 | && flag_trapping_math | |
3153 | && REAL_VALUE_ISINF (f0) && REAL_VALUE_ISINF (f1)) | |
0cedb36c | 3154 | { |
0a67e02c PB |
3155 | int s0 = REAL_VALUE_NEGATIVE (f0); |
3156 | int s1 = REAL_VALUE_NEGATIVE (f1); | |
0cedb36c | 3157 | |
0a67e02c | 3158 | switch (code) |
1e9b78b0 | 3159 | { |
0a67e02c PB |
3160 | case PLUS: |
3161 | /* Inf + -Inf = NaN plus exception. */ | |
3162 | if (s0 != s1) | |
3163 | return 0; | |
3164 | break; | |
3165 | case MINUS: | |
3166 | /* Inf - Inf = NaN plus exception. */ | |
3167 | if (s0 == s1) | |
3168 | return 0; | |
3169 | break; | |
3170 | case DIV: | |
3171 | /* Inf / Inf = NaN plus exception. */ | |
3172 | return 0; | |
3173 | default: | |
3174 | break; | |
0cedb36c JL |
3175 | } |
3176 | } | |
0cedb36c | 3177 | |
0a67e02c PB |
3178 | if (code == MULT && MODE_HAS_INFINITIES (mode) && HONOR_NANS (mode) |
3179 | && flag_trapping_math | |
3180 | && ((REAL_VALUE_ISINF (f0) && REAL_VALUES_EQUAL (f1, dconst0)) | |
3181 | || (REAL_VALUE_ISINF (f1) | |
3182 | && REAL_VALUES_EQUAL (f0, dconst0)))) | |
3183 | /* Inf * 0 = NaN plus exception. */ | |
3184 | return 0; | |
852c8ba1 | 3185 | |
0a67e02c PB |
3186 | inexact = real_arithmetic (&value, rtx_to_tree_code (code), |
3187 | &f0, &f1); | |
3188 | real_convert (&result, mode, &value); | |
41374e13 | 3189 | |
68328cda EB |
3190 | /* Don't constant fold this floating point operation if |
3191 | the result has overflowed and flag_trapping_math. */ | |
3192 | ||
3193 | if (flag_trapping_math | |
3194 | && MODE_HAS_INFINITIES (mode) | |
3195 | && REAL_VALUE_ISINF (result) | |
3196 | && !REAL_VALUE_ISINF (f0) | |
3197 | && !REAL_VALUE_ISINF (f1)) | |
3198 | /* Overflow plus exception. */ | |
3199 | return 0; | |
3200 | ||
0a67e02c PB |
3201 | /* Don't constant fold this floating point operation if the |
3202 | result may dependent upon the run-time rounding mode and | |
3203 | flag_rounding_math is set, or if GCC's software emulation | |
3204 | is unable to accurately represent the result. */ | |
852c8ba1 | 3205 | |
0a67e02c | 3206 | if ((flag_rounding_math |
4099e2c2 | 3207 | || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations)) |
0a67e02c PB |
3208 | && (inexact || !real_identical (&result, &value))) |
3209 | return NULL_RTX; | |
d9deed68 | 3210 | |
0a67e02c | 3211 | return CONST_DOUBLE_FROM_REAL_VALUE (result, mode); |
0cedb36c | 3212 | } |
0cedb36c JL |
3213 | } |
3214 | ||
0a67e02c PB |
3215 | /* We can fold some multi-word operations. */ |
3216 | if (GET_MODE_CLASS (mode) == MODE_INT | |
3217 | && width == HOST_BITS_PER_WIDE_INT * 2 | |
481683e1 SZ |
3218 | && (GET_CODE (op0) == CONST_DOUBLE || CONST_INT_P (op0)) |
3219 | && (GET_CODE (op1) == CONST_DOUBLE || CONST_INT_P (op1))) | |
0a67e02c PB |
3220 | { |
3221 | unsigned HOST_WIDE_INT l1, l2, lv, lt; | |
3222 | HOST_WIDE_INT h1, h2, hv, ht; | |
0cedb36c | 3223 | |
0a67e02c PB |
3224 | if (GET_CODE (op0) == CONST_DOUBLE) |
3225 | l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0); | |
3226 | else | |
3227 | l1 = INTVAL (op0), h1 = HWI_SIGN_EXTEND (l1); | |
0cedb36c | 3228 | |
0a67e02c PB |
3229 | if (GET_CODE (op1) == CONST_DOUBLE) |
3230 | l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1); | |
3231 | else | |
3232 | l2 = INTVAL (op1), h2 = HWI_SIGN_EXTEND (l2); | |
0cedb36c | 3233 | |
0a67e02c PB |
3234 | switch (code) |
3235 | { | |
3236 | case MINUS: | |
3237 | /* A - B == A + (-B). */ | |
3238 | neg_double (l2, h2, &lv, &hv); | |
3239 | l2 = lv, h2 = hv; | |
0cedb36c | 3240 | |
0a67e02c | 3241 | /* Fall through.... */ |
0cedb36c | 3242 | |
0a67e02c PB |
3243 | case PLUS: |
3244 | add_double (l1, h1, l2, h2, &lv, &hv); | |
3245 | break; | |
0cedb36c | 3246 | |
0a67e02c PB |
3247 | case MULT: |
3248 | mul_double (l1, h1, l2, h2, &lv, &hv); | |
3249 | break; | |
0cedb36c | 3250 | |
0a67e02c PB |
3251 | case DIV: |
3252 | if (div_and_round_double (TRUNC_DIV_EXPR, 0, l1, h1, l2, h2, | |
3253 | &lv, &hv, <, &ht)) | |
3254 | return 0; | |
3255 | break; | |
0cedb36c | 3256 | |
0a67e02c PB |
3257 | case MOD: |
3258 | if (div_and_round_double (TRUNC_DIV_EXPR, 0, l1, h1, l2, h2, | |
3259 | <, &ht, &lv, &hv)) | |
3260 | return 0; | |
3261 | break; | |
0cedb36c | 3262 | |
0a67e02c PB |
3263 | case UDIV: |
3264 | if (div_and_round_double (TRUNC_DIV_EXPR, 1, l1, h1, l2, h2, | |
3265 | &lv, &hv, <, &ht)) | |
3266 | return 0; | |
3267 | break; | |
0cedb36c | 3268 | |
0a67e02c PB |
3269 | case UMOD: |
3270 | if (div_and_round_double (TRUNC_DIV_EXPR, 1, l1, h1, l2, h2, | |
3271 | <, &ht, &lv, &hv)) | |
3272 | return 0; | |
3273 | break; | |
0cedb36c | 3274 | |
0a67e02c PB |
3275 | case AND: |
3276 | lv = l1 & l2, hv = h1 & h2; | |
3277 | break; | |
0cedb36c | 3278 | |
0a67e02c PB |
3279 | case IOR: |
3280 | lv = l1 | l2, hv = h1 | h2; | |
3281 | break; | |
0cedb36c | 3282 | |
0a67e02c PB |
3283 | case XOR: |
3284 | lv = l1 ^ l2, hv = h1 ^ h2; | |
3285 | break; | |
0cedb36c | 3286 | |
0a67e02c PB |
3287 | case SMIN: |
3288 | if (h1 < h2 | |
3289 | || (h1 == h2 | |
3290 | && ((unsigned HOST_WIDE_INT) l1 | |
3291 | < (unsigned HOST_WIDE_INT) l2))) | |
3292 | lv = l1, hv = h1; | |
3293 | else | |
3294 | lv = l2, hv = h2; | |
3295 | break; | |
0cedb36c | 3296 | |
0a67e02c PB |
3297 | case SMAX: |
3298 | if (h1 > h2 | |
3299 | || (h1 == h2 | |
3300 | && ((unsigned HOST_WIDE_INT) l1 | |
3301 | > (unsigned HOST_WIDE_INT) l2))) | |
3302 | lv = l1, hv = h1; | |
3303 | else | |
3304 | lv = l2, hv = h2; | |
3305 | break; | |
0cedb36c | 3306 | |
0a67e02c PB |
3307 | case UMIN: |
3308 | if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2 | |
3309 | || (h1 == h2 | |
3310 | && ((unsigned HOST_WIDE_INT) l1 | |
3311 | < (unsigned HOST_WIDE_INT) l2))) | |
3312 | lv = l1, hv = h1; | |
3313 | else | |
3314 | lv = l2, hv = h2; | |
3315 | break; | |
0cedb36c | 3316 | |
0a67e02c PB |
3317 | case UMAX: |
3318 | if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2 | |
3319 | || (h1 == h2 | |
3320 | && ((unsigned HOST_WIDE_INT) l1 | |
3321 | > (unsigned HOST_WIDE_INT) l2))) | |
3322 | lv = l1, hv = h1; | |
3323 | else | |
3324 | lv = l2, hv = h2; | |
3325 | break; | |
0cedb36c | 3326 | |
0a67e02c PB |
3327 | case LSHIFTRT: case ASHIFTRT: |
3328 | case ASHIFT: | |
3329 | case ROTATE: case ROTATERT: | |
3330 | if (SHIFT_COUNT_TRUNCATED) | |
3331 | l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0; | |
0cedb36c | 3332 | |
0a67e02c PB |
3333 | if (h2 != 0 || l2 >= GET_MODE_BITSIZE (mode)) |
3334 | return 0; | |
0cedb36c | 3335 | |
0a67e02c PB |
3336 | if (code == LSHIFTRT || code == ASHIFTRT) |
3337 | rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, | |
3338 | code == ASHIFTRT); | |
3339 | else if (code == ASHIFT) | |
3340 | lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1); | |
3341 | else if (code == ROTATE) | |
3342 | lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv); | |
3343 | else /* code == ROTATERT */ | |
3344 | rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv); | |
3345 | break; | |
0cedb36c | 3346 | |
0a67e02c PB |
3347 | default: |
3348 | return 0; | |
3349 | } | |
0cedb36c | 3350 | |
0a67e02c PB |
3351 | return immed_double_const (lv, hv, mode); |
3352 | } | |
0cedb36c | 3353 | |
481683e1 | 3354 | if (CONST_INT_P (op0) && CONST_INT_P (op1) |
0a67e02c PB |
3355 | && width <= HOST_BITS_PER_WIDE_INT && width != 0) |
3356 | { | |
3357 | /* Get the integer argument values in two forms: | |
3358 | zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */ | |
0cedb36c | 3359 | |
0a67e02c PB |
3360 | arg0 = INTVAL (op0); |
3361 | arg1 = INTVAL (op1); | |
0cedb36c | 3362 | |
0a67e02c PB |
3363 | if (width < HOST_BITS_PER_WIDE_INT) |
3364 | { | |
3365 | arg0 &= ((HOST_WIDE_INT) 1 << width) - 1; | |
3366 | arg1 &= ((HOST_WIDE_INT) 1 << width) - 1; | |
0cedb36c | 3367 | |
0a67e02c PB |
3368 | arg0s = arg0; |
3369 | if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1))) | |
3370 | arg0s |= ((HOST_WIDE_INT) (-1) << width); | |
4f5c0f7e | 3371 | |
0a67e02c PB |
3372 | arg1s = arg1; |
3373 | if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1))) | |
3374 | arg1s |= ((HOST_WIDE_INT) (-1) << width); | |
3375 | } | |
3376 | else | |
3377 | { | |
3378 | arg0s = arg0; | |
3379 | arg1s = arg1; | |
3380 | } | |
3381 | ||
3382 | /* Compute the value of the arithmetic. */ | |
3383 | ||
3384 | switch (code) | |
3385 | { | |
3386 | case PLUS: | |
3387 | val = arg0s + arg1s; | |
3388 | break; | |
3389 | ||
3390 | case MINUS: | |
3391 | val = arg0s - arg1s; | |
3392 | break; | |
3393 | ||
3394 | case MULT: | |
3395 | val = arg0s * arg1s; | |
3396 | break; | |
3397 | ||
3398 | case DIV: | |
3399 | if (arg1s == 0 | |
3400 | || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1) | |
3401 | && arg1s == -1)) | |
3402 | return 0; | |
3403 | val = arg0s / arg1s; | |
3404 | break; | |
3405 | ||
3406 | case MOD: | |
3407 | if (arg1s == 0 | |
3408 | || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1) | |
3409 | && arg1s == -1)) | |
3410 | return 0; | |
3411 | val = arg0s % arg1s; | |
3412 | break; | |
3413 | ||
3414 | case UDIV: | |
3415 | if (arg1 == 0 | |
3416 | || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1) | |
3417 | && arg1s == -1)) | |
3418 | return 0; | |
3419 | val = (unsigned HOST_WIDE_INT) arg0 / arg1; | |
3420 | break; | |
3421 | ||
3422 | case UMOD: | |
3423 | if (arg1 == 0 | |
3424 | || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1) | |
3425 | && arg1s == -1)) | |
3426 | return 0; | |
3427 | val = (unsigned HOST_WIDE_INT) arg0 % arg1; | |
3428 | break; | |
3429 | ||
3430 | case AND: | |
3431 | val = arg0 & arg1; | |
3432 | break; | |
3433 | ||
3434 | case IOR: | |
3435 | val = arg0 | arg1; | |
3436 | break; | |
3437 | ||
3438 | case XOR: | |
3439 | val = arg0 ^ arg1; | |
3440 | break; | |
3441 | ||
3442 | case LSHIFTRT: | |
3443 | case ASHIFT: | |
3444 | case ASHIFTRT: | |
3445 | /* Truncate the shift if SHIFT_COUNT_TRUNCATED, otherwise make sure | |
3446 | the value is in range. We can't return any old value for | |
3447 | out-of-range arguments because either the middle-end (via | |
3448 | shift_truncation_mask) or the back-end might be relying on | |
3449 | target-specific knowledge. Nor can we rely on | |
3450 | shift_truncation_mask, since the shift might not be part of an | |
3451 | ashlM3, lshrM3 or ashrM3 instruction. */ | |
3452 | if (SHIFT_COUNT_TRUNCATED) | |
3453 | arg1 = (unsigned HOST_WIDE_INT) arg1 % width; | |
3454 | else if (arg1 < 0 || arg1 >= GET_MODE_BITSIZE (mode)) | |
3455 | return 0; | |
3456 | ||
3457 | val = (code == ASHIFT | |
3458 | ? ((unsigned HOST_WIDE_INT) arg0) << arg1 | |
3459 | : ((unsigned HOST_WIDE_INT) arg0) >> arg1); | |
3460 | ||
3461 | /* Sign-extend the result for arithmetic right shifts. */ | |
3462 | if (code == ASHIFTRT && arg0s < 0 && arg1 > 0) | |
3463 | val |= ((HOST_WIDE_INT) -1) << (width - arg1); | |
3464 | break; | |
3465 | ||
3466 | case ROTATERT: | |
3467 | if (arg1 < 0) | |
3468 | return 0; | |
3469 | ||
3470 | arg1 %= width; | |
3471 | val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1)) | |
3472 | | (((unsigned HOST_WIDE_INT) arg0) >> arg1)); | |
3473 | break; | |
3474 | ||
3475 | case ROTATE: | |
3476 | if (arg1 < 0) | |
3477 | return 0; | |
3478 | ||
3479 | arg1 %= width; | |
3480 | val = ((((unsigned HOST_WIDE_INT) arg0) << arg1) | |
3481 | | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1))); | |
3482 | break; | |
3483 | ||
3484 | case COMPARE: | |
3485 | /* Do nothing here. */ | |
3486 | return 0; | |
3487 | ||
3488 | case SMIN: | |
3489 | val = arg0s <= arg1s ? arg0s : arg1s; | |
3490 | break; | |
3491 | ||
3492 | case UMIN: | |
3493 | val = ((unsigned HOST_WIDE_INT) arg0 | |
3494 | <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1); | |
3495 | break; | |
3496 | ||
3497 | case SMAX: | |
3498 | val = arg0s > arg1s ? arg0s : arg1s; | |
3499 | break; | |
3500 | ||
3501 | case UMAX: | |
3502 | val = ((unsigned HOST_WIDE_INT) arg0 | |
3503 | > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1); | |
3504 | break; | |
3505 | ||
3506 | case SS_PLUS: | |
3507 | case US_PLUS: | |
3508 | case SS_MINUS: | |
3509 | case US_MINUS: | |
14c931f1 CF |
3510 | case SS_MULT: |
3511 | case US_MULT: | |
3512 | case SS_DIV: | |
3513 | case US_DIV: | |
e551ad26 | 3514 | case SS_ASHIFT: |
14c931f1 | 3515 | case US_ASHIFT: |
0a67e02c PB |
3516 | /* ??? There are simplifications that can be done. */ |
3517 | return 0; | |
3518 | ||
3519 | default: | |
3520 | gcc_unreachable (); | |
3521 | } | |
0cedb36c | 3522 | |
bb80db7b | 3523 | return gen_int_mode (val, mode); |
0a67e02c | 3524 | } |
0cedb36c | 3525 | |
0a67e02c | 3526 | return NULL_RTX; |
0cedb36c | 3527 | } |
0a67e02c PB |
3528 | |
3529 | ||
0cedb36c JL |
3530 | \f |
3531 | /* Simplify a PLUS or MINUS, at least one of whose operands may be another | |
3532 | PLUS or MINUS. | |
3533 | ||
3534 | Rather than test for specific case, we do this by a brute-force method | |
3535 | and do all possible simplifications until no more changes occur. Then | |
1941069a | 3536 | we rebuild the operation. */ |
0cedb36c | 3537 | |
9b3bd424 RH |
struct simplify_plus_minus_op_data
{
  /* One term of the flattened sum built by simplify_plus_minus.  */
  rtx op;
  /* Nonzero if OP enters the sum negated (i.e. the term is -OP).  */
  short neg;
};
3543 | ||
7e0b4eae PB |
3544 | static bool |
3545 | simplify_plus_minus_op_data_cmp (rtx x, rtx y) | |
9b3bd424 | 3546 | { |
f805670f | 3547 | int result; |
9b3bd424 | 3548 | |
7e0b4eae PB |
3549 | result = (commutative_operand_precedence (y) |
3550 | - commutative_operand_precedence (x)); | |
f805670f | 3551 | if (result) |
7e0b4eae | 3552 | return result > 0; |
d26cef13 PB |
3553 | |
3554 | /* Group together equal REGs to do more simplification. */ | |
7e0b4eae PB |
3555 | if (REG_P (x) && REG_P (y)) |
3556 | return REGNO (x) > REGNO (y); | |
d26cef13 | 3557 | else |
7e0b4eae | 3558 | return false; |
9b3bd424 RH |
3559 | } |
3560 | ||
0cedb36c | 3561 | static rtx |
46c5ad27 | 3562 | simplify_plus_minus (enum rtx_code code, enum machine_mode mode, rtx op0, |
1941069a | 3563 | rtx op1) |
0cedb36c | 3564 | { |
9b3bd424 | 3565 | struct simplify_plus_minus_op_data ops[8]; |
0cedb36c | 3566 | rtx result, tem; |
36686ad6 | 3567 | int n_ops = 2, input_ops = 2; |
d26cef13 | 3568 | int changed, n_constants = 0, canonicalized = 0; |
0cedb36c JL |
3569 | int i, j; |
3570 | ||
703ad42b | 3571 | memset (ops, 0, sizeof ops); |
786de7eb | 3572 | |
0cedb36c JL |
3573 | /* Set up the two operands and then expand them until nothing has been |
3574 | changed. If we run out of room in our array, give up; this should | |
3575 | almost never happen. */ | |
3576 | ||
9b3bd424 RH |
3577 | ops[0].op = op0; |
3578 | ops[0].neg = 0; | |
3579 | ops[1].op = op1; | |
3580 | ops[1].neg = (code == MINUS); | |
0cedb36c | 3581 | |
9b3bd424 | 3582 | do |
0cedb36c JL |
3583 | { |
3584 | changed = 0; | |
3585 | ||
3586 | for (i = 0; i < n_ops; i++) | |
9b3bd424 RH |
3587 | { |
3588 | rtx this_op = ops[i].op; | |
3589 | int this_neg = ops[i].neg; | |
3590 | enum rtx_code this_code = GET_CODE (this_op); | |
0cedb36c | 3591 | |
9b3bd424 RH |
3592 | switch (this_code) |
3593 | { | |
3594 | case PLUS: | |
3595 | case MINUS: | |
3596 | if (n_ops == 7) | |
e16e3291 | 3597 | return NULL_RTX; |
0cedb36c | 3598 | |
9b3bd424 RH |
3599 | ops[n_ops].op = XEXP (this_op, 1); |
3600 | ops[n_ops].neg = (this_code == MINUS) ^ this_neg; | |
3601 | n_ops++; | |
3602 | ||
3603 | ops[i].op = XEXP (this_op, 0); | |
3604 | input_ops++; | |
3605 | changed = 1; | |
1941069a | 3606 | canonicalized |= this_neg; |
9b3bd424 RH |
3607 | break; |
3608 | ||
3609 | case NEG: | |
3610 | ops[i].op = XEXP (this_op, 0); | |
3611 | ops[i].neg = ! this_neg; | |
3612 | changed = 1; | |
1941069a | 3613 | canonicalized = 1; |
9b3bd424 RH |
3614 | break; |
3615 | ||
3616 | case CONST: | |
e3c8ea67 RH |
3617 | if (n_ops < 7 |
3618 | && GET_CODE (XEXP (this_op, 0)) == PLUS | |
3619 | && CONSTANT_P (XEXP (XEXP (this_op, 0), 0)) | |
3620 | && CONSTANT_P (XEXP (XEXP (this_op, 0), 1))) | |
3621 | { | |
3622 | ops[i].op = XEXP (XEXP (this_op, 0), 0); | |
3623 | ops[n_ops].op = XEXP (XEXP (this_op, 0), 1); | |
3624 | ops[n_ops].neg = this_neg; | |
3625 | n_ops++; | |
e3c8ea67 | 3626 | changed = 1; |
1941069a | 3627 | canonicalized = 1; |
e3c8ea67 | 3628 | } |
9b3bd424 RH |
3629 | break; |
3630 | ||
3631 | case NOT: | |
3632 | /* ~a -> (-a - 1) */ | |
3633 | if (n_ops != 7) | |
3634 | { | |
3635 | ops[n_ops].op = constm1_rtx; | |
2e951384 | 3636 | ops[n_ops++].neg = this_neg; |
9b3bd424 RH |
3637 | ops[i].op = XEXP (this_op, 0); |
3638 | ops[i].neg = !this_neg; | |
3639 | changed = 1; | |
1941069a | 3640 | canonicalized = 1; |
9b3bd424 RH |
3641 | } |
3642 | break; | |
0cedb36c | 3643 | |
9b3bd424 | 3644 | case CONST_INT: |
d26cef13 | 3645 | n_constants++; |
9b3bd424 RH |
3646 | if (this_neg) |
3647 | { | |
aff8a8d5 | 3648 | ops[i].op = neg_const_int (mode, this_op); |
9b3bd424 RH |
3649 | ops[i].neg = 0; |
3650 | changed = 1; | |
1941069a | 3651 | canonicalized = 1; |
9b3bd424 RH |
3652 | } |
3653 | break; | |
0cedb36c | 3654 | |
9b3bd424 RH |
3655 | default: |
3656 | break; | |
3657 | } | |
3658 | } | |
0cedb36c | 3659 | } |
9b3bd424 | 3660 | while (changed); |
0cedb36c | 3661 | |
d26cef13 PB |
3662 | if (n_constants > 1) |
3663 | canonicalized = 1; | |
36686ad6 | 3664 | |
d26cef13 | 3665 | gcc_assert (n_ops >= 2); |
0cedb36c | 3666 | |
1941069a PB |
3667 | /* If we only have two operands, we can avoid the loops. */ |
3668 | if (n_ops == 2) | |
3669 | { | |
3670 | enum rtx_code code = ops[0].neg || ops[1].neg ? MINUS : PLUS; | |
3671 | rtx lhs, rhs; | |
3672 | ||
3673 | /* Get the two operands. Be careful with the order, especially for | |
3674 | the cases where code == MINUS. */ | |
3675 | if (ops[0].neg && ops[1].neg) | |
3676 | { | |
3677 | lhs = gen_rtx_NEG (mode, ops[0].op); | |
3678 | rhs = ops[1].op; | |
3679 | } | |
3680 | else if (ops[0].neg) | |
3681 | { | |
3682 | lhs = ops[1].op; | |
3683 | rhs = ops[0].op; | |
3684 | } | |
3685 | else | |
3686 | { | |
3687 | lhs = ops[0].op; | |
3688 | rhs = ops[1].op; | |
3689 | } | |
3690 | ||
3691 | return simplify_const_binary_operation (code, mode, lhs, rhs); | |
3692 | } | |
3693 | ||
d26cef13 | 3694 | /* Now simplify each pair of operands until nothing changes. */ |
9b3bd424 | 3695 | do |
0cedb36c | 3696 | { |
d26cef13 PB |
3697 | /* Insertion sort is good enough for an eight-element array. */ |
3698 | for (i = 1; i < n_ops; i++) | |
3699 | { | |
3700 | struct simplify_plus_minus_op_data save; | |
3701 | j = i - 1; | |
7e0b4eae | 3702 | if (!simplify_plus_minus_op_data_cmp (ops[j].op, ops[i].op)) |
d26cef13 PB |
3703 | continue; |
3704 | ||
3705 | canonicalized = 1; | |
3706 | save = ops[i]; | |
3707 | do | |
3708 | ops[j + 1] = ops[j]; | |
7e0b4eae | 3709 | while (j-- && simplify_plus_minus_op_data_cmp (ops[j].op, save.op)); |
d26cef13 PB |
3710 | ops[j + 1] = save; |
3711 | } | |
0cedb36c | 3712 | |
d26cef13 PB |
3713 | changed = 0; |
3714 | for (i = n_ops - 1; i > 0; i--) | |
3715 | for (j = i - 1; j >= 0; j--) | |
9b3bd424 | 3716 | { |
d26cef13 PB |
3717 | rtx lhs = ops[j].op, rhs = ops[i].op; |
3718 | int lneg = ops[j].neg, rneg = ops[i].neg; | |
0cedb36c | 3719 | |
d26cef13 | 3720 | if (lhs != 0 && rhs != 0) |
9b3bd424 RH |
3721 | { |
3722 | enum rtx_code ncode = PLUS; | |
3723 | ||
3724 | if (lneg != rneg) | |
3725 | { | |
3726 | ncode = MINUS; | |
3727 | if (lneg) | |
3728 | tem = lhs, lhs = rhs, rhs = tem; | |
3729 | } | |
3730 | else if (swap_commutative_operands_p (lhs, rhs)) | |
3731 | tem = lhs, lhs = rhs, rhs = tem; | |
3732 | ||
481683e1 SZ |
3733 | if ((GET_CODE (lhs) == CONST || CONST_INT_P (lhs)) |
3734 | && (GET_CODE (rhs) == CONST || CONST_INT_P (rhs))) | |
349f4ea1 AK |
3735 | { |
3736 | rtx tem_lhs, tem_rhs; | |
3737 | ||
3738 | tem_lhs = GET_CODE (lhs) == CONST ? XEXP (lhs, 0) : lhs; | |
3739 | tem_rhs = GET_CODE (rhs) == CONST ? XEXP (rhs, 0) : rhs; | |
3740 | tem = simplify_binary_operation (ncode, mode, tem_lhs, tem_rhs); | |
9b3bd424 | 3741 | |
349f4ea1 AK |
3742 | if (tem && !CONSTANT_P (tem)) |
3743 | tem = gen_rtx_CONST (GET_MODE (tem), tem); | |
3744 | } | |
3745 | else | |
3746 | tem = simplify_binary_operation (ncode, mode, lhs, rhs); | |
3747 | ||
786de7eb | 3748 | /* Reject "simplifications" that just wrap the two |
9b3bd424 RH |
3749 | arguments in a CONST. Failure to do so can result |
3750 | in infinite recursion with simplify_binary_operation | |
3751 | when it calls us to simplify CONST operations. */ | |
3752 | if (tem | |
3753 | && ! (GET_CODE (tem) == CONST | |
3754 | && GET_CODE (XEXP (tem, 0)) == ncode | |
3755 | && XEXP (XEXP (tem, 0), 0) == lhs | |
d26cef13 | 3756 | && XEXP (XEXP (tem, 0), 1) == rhs)) |
9b3bd424 RH |
3757 | { |
3758 | lneg &= rneg; | |
3759 | if (GET_CODE (tem) == NEG) | |
3760 | tem = XEXP (tem, 0), lneg = !lneg; | |
481683e1 | 3761 | if (CONST_INT_P (tem) && lneg) |
aff8a8d5 | 3762 | tem = neg_const_int (mode, tem), lneg = 0; |
9b3bd424 RH |
3763 | |
3764 | ops[i].op = tem; | |
3765 | ops[i].neg = lneg; | |
3766 | ops[j].op = NULL_RTX; | |
3767 | changed = 1; | |
dc5b3407 | 3768 | canonicalized = 1; |
9b3bd424 RH |
3769 | } |
3770 | } | |
3771 | } | |
0cedb36c | 3772 | |
dc5b3407 ZD |
3773 | /* If nothing changed, fail. */ |
3774 | if (!canonicalized) | |
3775 | return NULL_RTX; | |
3776 | ||
d26cef13 PB |
3777 | /* Pack all the operands to the lower-numbered entries. */ |
3778 | for (i = 0, j = 0; j < n_ops; j++) | |
3779 | if (ops[j].op) | |
3780 | { | |
3781 | ops[i] = ops[j]; | |
3782 | i++; | |
3783 | } | |
3784 | n_ops = i; | |
0cedb36c | 3785 | } |
9b3bd424 | 3786 | while (changed); |
0cedb36c | 3787 | |
c877353c RS |
3788 | /* Create (minus -C X) instead of (neg (const (plus X C))). */ |
3789 | if (n_ops == 2 | |
481683e1 | 3790 | && CONST_INT_P (ops[1].op) |
c877353c RS |
3791 | && CONSTANT_P (ops[0].op) |
3792 | && ops[0].neg) | |
3793 | return gen_rtx_fmt_ee (MINUS, mode, ops[1].op, ops[0].op); | |
3794 | ||
9b3bd424 RH |
3795 | /* We suppressed creation of trivial CONST expressions in the |
3796 | combination loop to avoid recursion. Create one manually now. | |
3797 | The combination loop should have ensured that there is exactly | |
3798 | one CONST_INT, and the sort will have ensured that it is last | |
3799 | in the array and that any other constant will be next-to-last. */ | |
0cedb36c | 3800 | |
9b3bd424 | 3801 | if (n_ops > 1 |
481683e1 | 3802 | && CONST_INT_P (ops[n_ops - 1].op) |
9b3bd424 RH |
3803 | && CONSTANT_P (ops[n_ops - 2].op)) |
3804 | { | |
aff8a8d5 | 3805 | rtx value = ops[n_ops - 1].op; |
4768dbdd | 3806 | if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg) |
aff8a8d5 CM |
3807 | value = neg_const_int (mode, value); |
3808 | ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value)); | |
9b3bd424 RH |
3809 | n_ops--; |
3810 | } | |
3811 | ||
0786ca87 | 3812 | /* Put a non-negated operand first, if possible. */ |
0cedb36c | 3813 | |
9b3bd424 RH |
3814 | for (i = 0; i < n_ops && ops[i].neg; i++) |
3815 | continue; | |
0cedb36c | 3816 | if (i == n_ops) |
0786ca87 | 3817 | ops[0].op = gen_rtx_NEG (mode, ops[0].op); |
0cedb36c JL |
3818 | else if (i != 0) |
3819 | { | |
9b3bd424 RH |
3820 | tem = ops[0].op; |
3821 | ops[0] = ops[i]; | |
3822 | ops[i].op = tem; | |
3823 | ops[i].neg = 1; | |
0cedb36c JL |
3824 | } |
3825 | ||
3826 | /* Now make the result by performing the requested operations. */ | |
9b3bd424 | 3827 | result = ops[0].op; |
0cedb36c | 3828 | for (i = 1; i < n_ops; i++) |
9b3bd424 RH |
3829 | result = gen_rtx_fmt_ee (ops[i].neg ? MINUS : PLUS, |
3830 | mode, result, ops[i].op); | |
0cedb36c | 3831 | |
0786ca87 | 3832 | return result; |
0cedb36c JL |
3833 | } |
3834 | ||
5ac20c1a RS |
3835 | /* Check whether an operand is suitable for calling simplify_plus_minus. */ |
3836 | static bool | |
f7d504c2 | 3837 | plus_minus_operand_p (const_rtx x) |
5ac20c1a RS |
3838 | { |
3839 | return GET_CODE (x) == PLUS | |
3840 | || GET_CODE (x) == MINUS | |
3841 | || (GET_CODE (x) == CONST | |
3842 | && GET_CODE (XEXP (x, 0)) == PLUS | |
3843 | && CONSTANT_P (XEXP (XEXP (x, 0), 0)) | |
3844 | && CONSTANT_P (XEXP (XEXP (x, 0), 1))); | |
3845 | } | |
3846 | ||
0cedb36c | 3847 | /* Like simplify_binary_operation except used for relational operators. |
c6fb08ad | 3848 | MODE is the mode of the result. If MODE is VOIDmode, both operands must |
fc7ca5fd | 3849 | not also be VOIDmode. |
c6fb08ad PB |
3850 | |
3851 | CMP_MODE specifies in which mode the comparison is done in, so it is | |
3852 | the mode of the operands. If CMP_MODE is VOIDmode, it is taken from | |
3853 | the operands or, if both are VOIDmode, the operands are compared in | |
3854 | "infinite precision". */ | |
rtx
simplify_relational_operation (enum rtx_code code, enum machine_mode mode,
			       enum machine_mode cmp_mode, rtx op0, rtx op1)
{
  rtx tem, trueop0, trueop1;

  /* If the caller did not specify the comparison mode, infer it from the
     operands (one of them may be VOIDmode, e.g. a constant).  */
  if (cmp_mode == VOIDmode)
    cmp_mode = GET_MODE (op0);
  if (cmp_mode == VOIDmode)
    cmp_mode = GET_MODE (op1);

  /* First try to fold the comparison at compile time.  */
  tem = simplify_const_relational_operation (code, cmp_mode, op0, op1);
  if (tem)
    {
      /* TEM is const0_rtx or const_true_rtx; rewrite it as a constant of
	 the requested result mode MODE.  */
      if (SCALAR_FLOAT_MODE_P (mode))
	{
	  if (tem == const0_rtx)
	    return CONST0_RTX (mode);
#ifdef FLOAT_STORE_FLAG_VALUE
	  {
	    /* The target defines the FP value representing "true".  */
	    REAL_VALUE_TYPE val;
	    val = FLOAT_STORE_FLAG_VALUE (mode);
	    return CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
	  }
#else
	  return NULL_RTX;
#endif
	}
      if (VECTOR_MODE_P (mode))
	{
	  if (tem == const0_rtx)
	    return CONST0_RTX (mode);
#ifdef VECTOR_STORE_FLAG_VALUE
	  {
	    int i, units;
	    rtvec v;

	    /* Build a vector with every element equal to the target's
	       per-element "true" value.  */
	    rtx val = VECTOR_STORE_FLAG_VALUE (mode);
	    if (val == NULL_RTX)
	      return NULL_RTX;
	    if (val == const1_rtx)
	      return CONST1_RTX (mode);

	    units = GET_MODE_NUNITS (mode);
	    v = rtvec_alloc (units);
	    for (i = 0; i < units; i++)
	      RTVEC_ELT (v, i) = val;
	    return gen_rtx_raw_CONST_VECTOR (mode, v);
	  }
#else
	  return NULL_RTX;
#endif
	}

      return tem;
    }

  /* For the following tests, ensure const0_rtx is op1.  */
  if (swap_commutative_operands_p (op0, op1)
      || (op0 == const0_rtx && op1 != const0_rtx))
    tem = op0, op0 = op1, op1 = tem, code = swap_condition (code);

  /* If op0 is a compare, extract the comparison arguments from it.  */
  if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
    return simplify_gen_relational (code, mode, VOIDmode,
				    XEXP (op0, 0), XEXP (op0, 1));

  /* The remaining simplifications (in the _1 helper) do not apply to
     condition-code comparisons.  */
  if (GET_MODE_CLASS (cmp_mode) == MODE_CC
      || CC0_P (op0))
    return NULL_RTX;

  trueop0 = avoid_constant_pool_reference (op0);
  trueop1 = avoid_constant_pool_reference (op1);
  return simplify_relational_operation_1 (code, mode, cmp_mode,
					  trueop0, trueop1);
}
3931 | ||
3932 | /* This part of simplify_relational_operation is only used when CMP_MODE | |
3933 | is not in class MODE_CC (i.e. it is a real comparison). | |
3934 | ||
3935 | MODE is the mode of the result, while CMP_MODE specifies in which | |
3936 | mode the comparison is done in, so it is the mode of the operands. */ | |
bc4ad38c ZD |
3937 | |
3938 | static rtx | |
c6fb08ad PB |
3939 | simplify_relational_operation_1 (enum rtx_code code, enum machine_mode mode, |
3940 | enum machine_mode cmp_mode, rtx op0, rtx op1) | |
3941 | { | |
bc4ad38c ZD |
3942 | enum rtx_code op0code = GET_CODE (op0); |
3943 | ||
3fa0cacd | 3944 | if (op1 == const0_rtx && COMPARISON_P (op0)) |
c6fb08ad | 3945 | { |
3fa0cacd RS |
3946 | /* If op0 is a comparison, extract the comparison arguments |
3947 | from it. */ | |
3948 | if (code == NE) | |
c6fb08ad | 3949 | { |
3fa0cacd RS |
3950 | if (GET_MODE (op0) == mode) |
3951 | return simplify_rtx (op0); | |
3952 | else | |
3953 | return simplify_gen_relational (GET_CODE (op0), mode, VOIDmode, | |
3954 | XEXP (op0, 0), XEXP (op0, 1)); | |
3955 | } | |
3956 | else if (code == EQ) | |
3957 | { | |
3958 | enum rtx_code new_code = reversed_comparison_code (op0, NULL_RTX); | |
3959 | if (new_code != UNKNOWN) | |
3960 | return simplify_gen_relational (new_code, mode, VOIDmode, | |
3961 | XEXP (op0, 0), XEXP (op0, 1)); | |
3962 | } | |
3963 | } | |
3964 | ||
1d1eb80c BS |
3965 | /* (LTU/GEU (PLUS a C) C), where C is constant, can be simplified to |
3966 | (GEU/LTU a -C). Likewise for (LTU/GEU (PLUS a C) a). */ | |
3967 | if ((code == LTU || code == GEU) | |
3968 | && GET_CODE (op0) == PLUS | |
481683e1 | 3969 | && CONST_INT_P (XEXP (op0, 1)) |
1d1eb80c BS |
3970 | && (rtx_equal_p (op1, XEXP (op0, 0)) |
3971 | || rtx_equal_p (op1, XEXP (op0, 1)))) | |
3972 | { | |
3973 | rtx new_cmp | |
3974 | = simplify_gen_unary (NEG, cmp_mode, XEXP (op0, 1), cmp_mode); | |
3975 | return simplify_gen_relational ((code == LTU ? GEU : LTU), mode, | |
3976 | cmp_mode, XEXP (op0, 0), new_cmp); | |
3977 | } | |
3978 | ||
921c4418 RIL |
3979 | /* Canonicalize (LTU/GEU (PLUS a b) b) as (LTU/GEU (PLUS a b) a). */ |
3980 | if ((code == LTU || code == GEU) | |
3981 | && GET_CODE (op0) == PLUS | |
cf369845 HPN |
3982 | && rtx_equal_p (op1, XEXP (op0, 1)) |
3983 | /* Don't recurse "infinitely" for (LTU/GEU (PLUS b b) b). */ | |
3984 | && !rtx_equal_p (op1, XEXP (op0, 0))) | |
921c4418 RIL |
3985 | return simplify_gen_relational (code, mode, cmp_mode, op0, XEXP (op0, 0)); |
3986 | ||
3fa0cacd RS |
3987 | if (op1 == const0_rtx) |
3988 | { | |
3989 | /* Canonicalize (GTU x 0) as (NE x 0). */ | |
3990 | if (code == GTU) | |
3991 | return simplify_gen_relational (NE, mode, cmp_mode, op0, op1); | |
3992 | /* Canonicalize (LEU x 0) as (EQ x 0). */ | |
3993 | if (code == LEU) | |
3994 | return simplify_gen_relational (EQ, mode, cmp_mode, op0, op1); | |
3995 | } | |
3996 | else if (op1 == const1_rtx) | |
3997 | { | |
3998 | switch (code) | |
3999 | { | |
4000 | case GE: | |
4001 | /* Canonicalize (GE x 1) as (GT x 0). */ | |
4002 | return simplify_gen_relational (GT, mode, cmp_mode, | |
4003 | op0, const0_rtx); | |
4004 | case GEU: | |
4005 | /* Canonicalize (GEU x 1) as (NE x 0). */ | |
4006 | return simplify_gen_relational (NE, mode, cmp_mode, | |
4007 | op0, const0_rtx); | |
4008 | case LT: | |
4009 | /* Canonicalize (LT x 1) as (LE x 0). */ | |
4010 | return simplify_gen_relational (LE, mode, cmp_mode, | |
4011 | op0, const0_rtx); | |
4012 | case LTU: | |
4013 | /* Canonicalize (LTU x 1) as (EQ x 0). */ | |
4014 | return simplify_gen_relational (EQ, mode, cmp_mode, | |
4015 | op0, const0_rtx); | |
4016 | default: | |
4017 | break; | |
c6fb08ad PB |
4018 | } |
4019 | } | |
3fa0cacd RS |
4020 | else if (op1 == constm1_rtx) |
4021 | { | |
4022 | /* Canonicalize (LE x -1) as (LT x 0). */ | |
4023 | if (code == LE) | |
4024 | return simplify_gen_relational (LT, mode, cmp_mode, op0, const0_rtx); | |
4025 | /* Canonicalize (GT x -1) as (GE x 0). */ | |
4026 | if (code == GT) | |
4027 | return simplify_gen_relational (GE, mode, cmp_mode, op0, const0_rtx); | |
4028 | } | |
0cedb36c | 4029 | |
bc4ad38c ZD |
4030 | /* (eq/ne (plus x cst1) cst2) simplifies to (eq/ne x (cst2 - cst1)) */ |
4031 | if ((code == EQ || code == NE) | |
4032 | && (op0code == PLUS || op0code == MINUS) | |
4033 | && CONSTANT_P (op1) | |
551a3297 RH |
4034 | && CONSTANT_P (XEXP (op0, 1)) |
4035 | && (INTEGRAL_MODE_P (cmp_mode) || flag_unsafe_math_optimizations)) | |
bc4ad38c ZD |
4036 | { |
4037 | rtx x = XEXP (op0, 0); | |
4038 | rtx c = XEXP (op0, 1); | |
4039 | ||
4040 | c = simplify_gen_binary (op0code == PLUS ? MINUS : PLUS, | |
4041 | cmp_mode, op1, c); | |
4042 | return simplify_gen_relational (code, mode, cmp_mode, x, c); | |
4043 | } | |
4044 | ||
1419a885 RS |
4045 | /* (ne:SI (zero_extract:SI FOO (const_int 1) BAR) (const_int 0))) is |
4046 | the same as (zero_extract:SI FOO (const_int 1) BAR). */ | |
4047 | if (code == NE | |
4048 | && op1 == const0_rtx | |
4049 | && GET_MODE_CLASS (mode) == MODE_INT | |
4050 | && cmp_mode != VOIDmode | |
61961eff RS |
4051 | /* ??? Work-around BImode bugs in the ia64 backend. */ |
4052 | && mode != BImode | |
f8eacd97 | 4053 | && cmp_mode != BImode |
1419a885 RS |
4054 | && nonzero_bits (op0, cmp_mode) == 1 |
4055 | && STORE_FLAG_VALUE == 1) | |
f8eacd97 RS |
4056 | return GET_MODE_SIZE (mode) > GET_MODE_SIZE (cmp_mode) |
4057 | ? simplify_gen_unary (ZERO_EXTEND, mode, op0, cmp_mode) | |
4058 | : lowpart_subreg (mode, op0, cmp_mode); | |
1419a885 | 4059 | |
5484a3c3 RS |
4060 | /* (eq/ne (xor x y) 0) simplifies to (eq/ne x y). */ |
4061 | if ((code == EQ || code == NE) | |
4062 | && op1 == const0_rtx | |
4063 | && op0code == XOR) | |
4064 | return simplify_gen_relational (code, mode, cmp_mode, | |
4065 | XEXP (op0, 0), XEXP (op0, 1)); | |
4066 | ||
4d49d44d | 4067 | /* (eq/ne (xor x y) x) simplifies to (eq/ne y 0). */ |
5484a3c3 RS |
4068 | if ((code == EQ || code == NE) |
4069 | && op0code == XOR | |
4070 | && rtx_equal_p (XEXP (op0, 0), op1) | |
4d49d44d KH |
4071 | && !side_effects_p (XEXP (op0, 0))) |
4072 | return simplify_gen_relational (code, mode, cmp_mode, | |
4073 | XEXP (op0, 1), const0_rtx); | |
4074 | ||
4075 | /* Likewise (eq/ne (xor x y) y) simplifies to (eq/ne x 0). */ | |
5484a3c3 RS |
4076 | if ((code == EQ || code == NE) |
4077 | && op0code == XOR | |
4078 | && rtx_equal_p (XEXP (op0, 1), op1) | |
4d49d44d KH |
4079 | && !side_effects_p (XEXP (op0, 1))) |
4080 | return simplify_gen_relational (code, mode, cmp_mode, | |
4081 | XEXP (op0, 0), const0_rtx); | |
5484a3c3 RS |
4082 | |
4083 | /* (eq/ne (xor x C1) C2) simplifies to (eq/ne x (C1^C2)). */ | |
4084 | if ((code == EQ || code == NE) | |
4085 | && op0code == XOR | |
481683e1 | 4086 | && (CONST_INT_P (op1) |
5484a3c3 | 4087 | || GET_CODE (op1) == CONST_DOUBLE) |
481683e1 | 4088 | && (CONST_INT_P (XEXP (op0, 1)) |
5484a3c3 RS |
4089 | || GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE)) |
4090 | return simplify_gen_relational (code, mode, cmp_mode, XEXP (op0, 0), | |
4091 | simplify_gen_binary (XOR, cmp_mode, | |
4092 | XEXP (op0, 1), op1)); | |
4093 | ||
69fce32f RS |
4094 | if (op0code == POPCOUNT && op1 == const0_rtx) |
4095 | switch (code) | |
4096 | { | |
4097 | case EQ: | |
4098 | case LE: | |
4099 | case LEU: | |
4100 | /* (eq (popcount x) (const_int 0)) -> (eq x (const_int 0)). */ | |
4101 | return simplify_gen_relational (EQ, mode, GET_MODE (XEXP (op0, 0)), | |
4102 | XEXP (op0, 0), const0_rtx); | |
4103 | ||
4104 | case NE: | |
4105 | case GT: | |
4106 | case GTU: | |
4107 | /* (ne (popcount x) (const_int 0)) -> (ne x (const_int 0)). */ | |
2376c58f | 4108 | return simplify_gen_relational (NE, mode, GET_MODE (XEXP (op0, 0)), |
69fce32f RS |
4109 | XEXP (op0, 0), const0_rtx); |
4110 | ||
4111 | default: | |
4112 | break; | |
4113 | } | |
4114 | ||
c6fb08ad PB |
4115 | return NULL_RTX; |
4116 | } | |
4117 | ||
39641489 PB |
/* Bitmask flags describing known outcomes of a comparison; decoded by
   comparison_result below.  */
enum
{
  CMP_EQ = 1,	/* Operands known equal.  */
  CMP_LT = 2,	/* Known signed less-than.  */
  CMP_GT = 4,	/* Known signed greater-than.  */
  CMP_LTU = 8,	/* Known unsigned less-than.  */
  CMP_GTU = 16	/* Known unsigned greater-than.  */
};
4126 | ||
4127 | ||
4128 | /* Convert the known results for EQ, LT, GT, LTU, GTU contained in | |
4129 | KNOWN_RESULT to a CONST_INT, based on the requested comparison CODE | |
a567207e PB |
4130 | For KNOWN_RESULT to make sense it should be either CMP_EQ, or the |
4131 | logical OR of one of (CMP_LT, CMP_GT) and one of (CMP_LTU, CMP_GTU). | |
39641489 PB |
4132 | For floating-point comparisons, assume that the operands were ordered. */ |
4133 | ||
4134 | static rtx | |
4135 | comparison_result (enum rtx_code code, int known_results) | |
4136 | { | |
39641489 PB |
4137 | switch (code) |
4138 | { | |
4139 | case EQ: | |
4140 | case UNEQ: | |
a567207e | 4141 | return (known_results & CMP_EQ) ? const_true_rtx : const0_rtx; |
39641489 PB |
4142 | case NE: |
4143 | case LTGT: | |
a567207e | 4144 | return (known_results & CMP_EQ) ? const0_rtx : const_true_rtx; |
39641489 PB |
4145 | |
4146 | case LT: | |
4147 | case UNLT: | |
a567207e | 4148 | return (known_results & CMP_LT) ? const_true_rtx : const0_rtx; |
39641489 PB |
4149 | case GE: |
4150 | case UNGE: | |
a567207e | 4151 | return (known_results & CMP_LT) ? const0_rtx : const_true_rtx; |
39641489 PB |
4152 | |
4153 | case GT: | |
4154 | case UNGT: | |
a567207e | 4155 | return (known_results & CMP_GT) ? const_true_rtx : const0_rtx; |
39641489 PB |
4156 | case LE: |
4157 | case UNLE: | |
a567207e | 4158 | return (known_results & CMP_GT) ? const0_rtx : const_true_rtx; |
39641489 PB |
4159 | |
4160 | case LTU: | |
a567207e | 4161 | return (known_results & CMP_LTU) ? const_true_rtx : const0_rtx; |
39641489 | 4162 | case GEU: |
a567207e | 4163 | return (known_results & CMP_LTU) ? const0_rtx : const_true_rtx; |
39641489 PB |
4164 | |
4165 | case GTU: | |
a567207e | 4166 | return (known_results & CMP_GTU) ? const_true_rtx : const0_rtx; |
39641489 | 4167 | case LEU: |
a567207e | 4168 | return (known_results & CMP_GTU) ? const0_rtx : const_true_rtx; |
39641489 PB |
4169 | |
4170 | case ORDERED: | |
4171 | return const_true_rtx; | |
4172 | case UNORDERED: | |
4173 | return const0_rtx; | |
4174 | default: | |
4175 | gcc_unreachable (); | |
4176 | } | |
4177 | } | |
4178 | ||
c6fb08ad PB |
/* Check if the given comparison (done in the given MODE) is actually a
   tautology or a contradiction.
   If no simplification is possible, this function returns zero.
   Otherwise, it returns either const_true_rtx or const0_rtx.

   CODE is the comparison code, MODE the mode in which the comparison is
   done, and OP0/OP1 the two operands.  MODE may be VOIDmode only when
   both operands are mode-less (asserted below).  */

rtx
simplify_const_relational_operation (enum rtx_code code,
				     enum machine_mode mode,
				     rtx op0, rtx op1)
{
  rtx tem;
  rtx trueop0;
  rtx trueop1;

  gcc_assert (mode != VOIDmode
	      || (GET_MODE (op0) == VOIDmode
		  && GET_MODE (op1) == VOIDmode));

  /* If op0 is a compare, extract the comparison arguments from it.  */
  if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
    {
      op1 = XEXP (op0, 1);
      op0 = XEXP (op0, 0);

      /* Recover the mode from whichever operand carries one.  */
      if (GET_MODE (op0) != VOIDmode)
	mode = GET_MODE (op0);
      else if (GET_MODE (op1) != VOIDmode)
	mode = GET_MODE (op1);
      else
	return 0;
    }

  /* We can't simplify MODE_CC values since we don't know what the
     actual comparison is.  */
  if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC || CC0_P (op0))
    return 0;

  /* Make sure the constant is second.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_condition (code);
    }

  /* Look through constant-pool references so constants stored in
     memory can still be folded.  */
  trueop0 = avoid_constant_pool_reference (op0);
  trueop1 = avoid_constant_pool_reference (op1);

  /* For integer comparisons of A and B maybe we can simplify A - B and can
     then simplify a comparison of that with zero.  If A and B are both either
     a register or a CONST_INT, this can't help; testing for these cases will
     prevent infinite recursion here and speed things up.

     We can only do this for EQ and NE comparisons as otherwise we may
     lose or introduce overflow which we cannot disregard as undefined as
     we do not know the signedness of the operation on either the left or
     the right hand side of the comparison.  */

  if (INTEGRAL_MODE_P (mode) && trueop1 != const0_rtx
      && (code == EQ || code == NE)
      && ! ((REG_P (op0) || CONST_INT_P (trueop0))
	    && (REG_P (op1) || CONST_INT_P (trueop1)))
      && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
      /* We cannot do this if tem is a nonzero address.  */
      && ! nonzero_address_p (tem))
    return simplify_const_relational_operation (signed_condition (code),
						mode, tem, const0_rtx);

  /* Without NaNs, ORDERED is a tautology and UNORDERED a contradiction.  */
  if (! HONOR_NANS (mode) && code == ORDERED)
    return const_true_rtx;

  if (! HONOR_NANS (mode) && code == UNORDERED)
    return const0_rtx;

  /* For modes without NaNs, if the two operands are equal, we know the
     result except if they have side-effects.  Even with NaNs we know
     the result of unordered comparisons and, if signaling NaNs are
     irrelevant, also the result of LT/GT/LTGT.  */
  if ((! HONOR_NANS (GET_MODE (trueop0))
       || code == UNEQ || code == UNLE || code == UNGE
       || ((code == LT || code == GT || code == LTGT)
	   && ! HONOR_SNANS (GET_MODE (trueop0))))
      && rtx_equal_p (trueop0, trueop1)
      && ! side_effects_p (trueop0))
    return comparison_result (code, CMP_EQ);

  /* If the operands are floating-point constants, see if we can fold
     the result.  */
  if (GET_CODE (trueop0) == CONST_DOUBLE
      && GET_CODE (trueop1) == CONST_DOUBLE
      && SCALAR_FLOAT_MODE_P (GET_MODE (trueop0)))
    {
      REAL_VALUE_TYPE d0, d1;

      REAL_VALUE_FROM_CONST_DOUBLE (d0, trueop0);
      REAL_VALUE_FROM_CONST_DOUBLE (d1, trueop1);

      /* Comparisons are unordered iff at least one of the values is NaN.  */
      if (REAL_VALUE_ISNAN (d0) || REAL_VALUE_ISNAN (d1))
	switch (code)
	  {
	  case UNEQ:
	  case UNLT:
	  case UNGT:
	  case UNLE:
	  case UNGE:
	  case NE:
	  case UNORDERED:
	    return const_true_rtx;
	  case EQ:
	  case LT:
	  case GT:
	  case LE:
	  case GE:
	  case LTGT:
	  case ORDERED:
	    return const0_rtx;
	  default:
	    return 0;
	  }

      /* Ordered constants: classify as equal, less, or greater.  */
      return comparison_result (code,
				(REAL_VALUES_EQUAL (d0, d1) ? CMP_EQ :
				 REAL_VALUES_LESS (d0, d1) ? CMP_LT : CMP_GT));
    }

  /* Otherwise, see if the operands are both integers.  */
  if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
      && (GET_CODE (trueop0) == CONST_DOUBLE
	  || CONST_INT_P (trueop0))
      && (GET_CODE (trueop1) == CONST_DOUBLE
	  || CONST_INT_P (trueop1)))
    {
      int width = GET_MODE_BITSIZE (mode);
      /* Each constant is held as a (low, high) pair of host wide ints;
	 the 's' copies are used for signed comparison, the 'u' copies
	 for unsigned comparison.  */
      HOST_WIDE_INT l0s, h0s, l1s, h1s;
      unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;

      /* Get the two words comprising each integer constant.  */
      if (GET_CODE (trueop0) == CONST_DOUBLE)
	{
	  l0u = l0s = CONST_DOUBLE_LOW (trueop0);
	  h0u = h0s = CONST_DOUBLE_HIGH (trueop0);
	}
      else
	{
	  l0u = l0s = INTVAL (trueop0);
	  h0u = h0s = HWI_SIGN_EXTEND (l0s);
	}

      if (GET_CODE (trueop1) == CONST_DOUBLE)
	{
	  l1u = l1s = CONST_DOUBLE_LOW (trueop1);
	  h1u = h1s = CONST_DOUBLE_HIGH (trueop1);
	}
      else
	{
	  l1u = l1s = INTVAL (trueop1);
	  h1u = h1s = HWI_SIGN_EXTEND (l1s);
	}

      /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
	 we have to sign or zero-extend the values.  */
      if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
	{
	  l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
	  l1u &= ((HOST_WIDE_INT) 1 << width) - 1;

	  if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
	    l0s |= ((HOST_WIDE_INT) (-1) << width);

	  if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
	    l1s |= ((HOST_WIDE_INT) (-1) << width);
	}
      /* A value that fits in one word determines the high word too.  */
      if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
	h0u = h1u = 0, h0s = HWI_SIGN_EXTEND (l0s), h1s = HWI_SIGN_EXTEND (l1s);

      if (h0u == h1u && l0u == l1u)
	return comparison_result (code, CMP_EQ);
      else
	{
	  int cr;
	  /* Compare (high, low) lexicographically, once signed and once
	     unsigned, and hand the combined knowledge to
	     comparison_result.  */
	  cr = (h0s < h1s || (h0s == h1s && l0u < l1u)) ? CMP_LT : CMP_GT;
	  cr |= (h0u < h1u || (h0u == h1u && l0u < l1u)) ? CMP_LTU : CMP_GTU;
	  return comparison_result (code, cr);
	}
    }

  /* Optimize comparisons with upper and lower bounds.  */
  if (SCALAR_INT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (trueop1))
    {
      int sign;
      unsigned HOST_WIDE_INT nonzero = nonzero_bits (trueop0, mode);
      HOST_WIDE_INT val = INTVAL (trueop1);
      HOST_WIDE_INT mmin, mmax;

      if (code == GEU
	  || code == LEU
	  || code == GTU
	  || code == LTU)
	sign = 0;
      else
	sign = 1;

      /* Get a reduced range if the sign bit is zero.  */
      if (nonzero <= (GET_MODE_MASK (mode) >> 1))
	{
	  mmin = 0;
	  mmax = nonzero;
	}
      else
	{
	  rtx mmin_rtx, mmax_rtx;
	  get_mode_bounds (mode, sign, mode, &mmin_rtx, &mmax_rtx);

	  mmin = INTVAL (mmin_rtx);
	  mmax = INTVAL (mmax_rtx);
	  if (sign)
	    {
	      /* Known sign-bit copies shrink the possible signed range.  */
	      unsigned int sign_copies = num_sign_bit_copies (trueop0, mode);

	      mmin >>= (sign_copies - 1);
	      mmax >>= (sign_copies - 1);
	    }
	}

      switch (code)
	{
	/* x >= y is always true for y <= mmin, always false for y > mmax.  */
	case GEU:
	  if ((unsigned HOST_WIDE_INT) val <= (unsigned HOST_WIDE_INT) mmin)
	    return const_true_rtx;
	  if ((unsigned HOST_WIDE_INT) val > (unsigned HOST_WIDE_INT) mmax)
	    return const0_rtx;
	  break;
	case GE:
	  if (val <= mmin)
	    return const_true_rtx;
	  if (val > mmax)
	    return const0_rtx;
	  break;

	/* x <= y is always true for y >= mmax, always false for y < mmin.  */
	case LEU:
	  if ((unsigned HOST_WIDE_INT) val >= (unsigned HOST_WIDE_INT) mmax)
	    return const_true_rtx;
	  if ((unsigned HOST_WIDE_INT) val < (unsigned HOST_WIDE_INT) mmin)
	    return const0_rtx;
	  break;
	case LE:
	  if (val >= mmax)
	    return const_true_rtx;
	  if (val < mmin)
	    return const0_rtx;
	  break;

	case EQ:
	  /* x == y is always false for y out of range.  */
	  if (val < mmin || val > mmax)
	    return const0_rtx;
	  break;

	/* x > y is always false for y >= mmax, always true for y < mmin.  */
	case GTU:
	  if ((unsigned HOST_WIDE_INT) val >= (unsigned HOST_WIDE_INT) mmax)
	    return const0_rtx;
	  if ((unsigned HOST_WIDE_INT) val < (unsigned HOST_WIDE_INT) mmin)
	    return const_true_rtx;
	  break;
	case GT:
	  if (val >= mmax)
	    return const0_rtx;
	  if (val < mmin)
	    return const_true_rtx;
	  break;

	/* x < y is always false for y <= mmin, always true for y > mmax.  */
	case LTU:
	  if ((unsigned HOST_WIDE_INT) val <= (unsigned HOST_WIDE_INT) mmin)
	    return const0_rtx;
	  if ((unsigned HOST_WIDE_INT) val > (unsigned HOST_WIDE_INT) mmax)
	    return const_true_rtx;
	  break;
	case LT:
	  if (val <= mmin)
	    return const0_rtx;
	  if (val > mmax)
	    return const_true_rtx;
	  break;

	case NE:
	  /* x != y is always true for y out of range.  */
	  if (val < mmin || val > mmax)
	    return const_true_rtx;
	  break;

	default:
	  break;
	}
    }

  /* Optimize integer comparisons with zero.  */
  if (trueop1 == const0_rtx)
    {
      /* Some addresses are known to be nonzero.  We don't know
	 their sign, but equality comparisons are known.  */
      if (nonzero_address_p (trueop0))
	{
	  if (code == EQ || code == LEU)
	    return const0_rtx;
	  if (code == NE || code == GTU)
	    return const_true_rtx;
	}

      /* See if the first operand is an IOR with a constant.  If so, we
	 may be able to determine the result of this comparison.  */
      if (GET_CODE (op0) == IOR)
	{
	  rtx inner_const = avoid_constant_pool_reference (XEXP (op0, 1));
	  if (CONST_INT_P (inner_const) && inner_const != const0_rtx)
	    {
	      /* A nonzero OR-ed constant guarantees the result is nonzero;
		 if it also sets the sign bit, the result is negative.
		 NOTE(review): when sign_bitnum == HOST_BITS_PER_WIDE_INT the
		 shift below is out of range for HOST_WIDE_INT -- presumably
		 unreachable for such modes; confirm against callers.  */
	      int sign_bitnum = GET_MODE_BITSIZE (mode) - 1;
	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
			      && (INTVAL (inner_const)
				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));

	      switch (code)
		{
		case EQ:
		case LEU:
		  return const0_rtx;
		case NE:
		case GTU:
		  return const_true_rtx;
		case LT:
		case LE:
		  if (has_sign)
		    return const_true_rtx;
		  break;
		case GT:
		case GE:
		  if (has_sign)
		    return const0_rtx;
		  break;
		default:
		  break;
		}
	    }
	}
    }

  /* Optimize comparison of ABS with zero.  */
  if (trueop1 == CONST0_RTX (mode)
      && (GET_CODE (trueop0) == ABS
	  || (GET_CODE (trueop0) == FLOAT_EXTEND
	      && GET_CODE (XEXP (trueop0, 0)) == ABS)))
    {
      switch (code)
	{
	case LT:
	  /* Optimize abs(x) < 0.0.  */
	  if (!HONOR_SNANS (mode)
	      && (!INTEGRAL_MODE_P (mode)
		  || (!flag_wrapv && !flag_trapv && flag_strict_overflow)))
	    {
	      /* For integral modes this folding relies on signed overflow
		 being undefined (abs (INT_MIN) would be negative with
		 wrapping), hence the -fstrict-overflow warning.  */
	      if (INTEGRAL_MODE_P (mode)
		  && (issue_strict_overflow_warning
		      (WARN_STRICT_OVERFLOW_CONDITIONAL)))
		warning (OPT_Wstrict_overflow,
			 ("assuming signed overflow does not occur when "
			  "assuming abs (x) < 0 is false"));
	      return const0_rtx;
	    }
	  break;

	case GE:
	  /* Optimize abs(x) >= 0.0.  */
	  if (!HONOR_NANS (mode)
	      && (!INTEGRAL_MODE_P (mode)
		  || (!flag_wrapv && !flag_trapv && flag_strict_overflow)))
	    {
	      if (INTEGRAL_MODE_P (mode)
		  && (issue_strict_overflow_warning
		      (WARN_STRICT_OVERFLOW_CONDITIONAL)))
		warning (OPT_Wstrict_overflow,
			 ("assuming signed overflow does not occur when "
			  "assuming abs (x) >= 0 is true"));
	      return const_true_rtx;
	    }
	  break;

	case UNGE:
	  /* Optimize ! (abs(x) < 0.0).  */
	  return const_true_rtx;

	default:
	  break;
	}
    }

  return 0;
}
4581 | \f | |
/* Simplify CODE, an operation with result mode MODE and three operands,
   OP0, OP1, and OP2.  OP0_MODE was the mode of OP0 before it became
   a constant.  Return 0 if no simplifications is possible.

   Handles SIGN_EXTRACT/ZERO_EXTRACT of constants, IF_THEN_ELSE
   rewrites, and VEC_MERGE with a constant selector.  */

rtx
simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
			    enum machine_mode op0_mode, rtx op0, rtx op1,
			    rtx op2)
{
  unsigned int width = GET_MODE_BITSIZE (mode);

  /* VOIDmode means "infinite" precision.  */
  if (width == 0)
    width = HOST_BITS_PER_WIDE_INT;

  switch (code)
    {
    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Fold an extraction whose source, size (op1) and position (op2)
	 are all constant and fit in a host wide int.  */
      if (CONST_INT_P (op0)
	  && CONST_INT_P (op1)
	  && CONST_INT_P (op2)
	  && ((unsigned) INTVAL (op1) + (unsigned) INTVAL (op2) <= width)
	  && width <= (unsigned) HOST_BITS_PER_WIDE_INT)
	{
	  /* Extracting a bit-field from a constant */
	  HOST_WIDE_INT val = INTVAL (op0);

	  /* Position counts from the other end on big-endian bit
	     numbering.  */
	  if (BITS_BIG_ENDIAN)
	    val >>= (GET_MODE_BITSIZE (op0_mode)
		     - INTVAL (op2) - INTVAL (op1));
	  else
	    val >>= INTVAL (op2);

	  if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
	    {
	      /* First zero-extend.  */
	      val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
	      /* If desired, propagate sign bit.  */
	      if (code == SIGN_EXTRACT
		  && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
		val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
	    }

	  /* Clear the bits that don't belong in our mode,
	     unless they and our sign bit are all one.
	     So we get either a reasonable negative value or a reasonable
	     unsigned value for this mode.  */
	  if (width < HOST_BITS_PER_WIDE_INT
	      && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
		  != ((HOST_WIDE_INT) (-1) << (width - 1))))
	    val &= ((HOST_WIDE_INT) 1 << width) - 1;

	  return gen_int_mode (val, mode);
	}
      break;

    case IF_THEN_ELSE:
      /* A constant condition selects one arm outright.  */
      if (CONST_INT_P (op0))
	return op0 != const0_rtx ? op1 : op2;

      /* Convert c ? a : a into "a".  */
      if (rtx_equal_p (op1, op2) && ! side_effects_p (op0))
	return op1;

      /* Convert a != b ? a : b into "a".  */
      if (GET_CODE (op0) == NE
	  && ! side_effects_p (op0)
	  && ! HONOR_NANS (mode)
	  && ! HONOR_SIGNED_ZEROS (mode)
	  && ((rtx_equal_p (XEXP (op0, 0), op1)
	       && rtx_equal_p (XEXP (op0, 1), op2))
	      || (rtx_equal_p (XEXP (op0, 0), op2)
		  && rtx_equal_p (XEXP (op0, 1), op1))))
	return op1;

      /* Convert a == b ? a : b into "b".  */
      if (GET_CODE (op0) == EQ
	  && ! side_effects_p (op0)
	  && ! HONOR_NANS (mode)
	  && ! HONOR_SIGNED_ZEROS (mode)
	  && ((rtx_equal_p (XEXP (op0, 0), op1)
	       && rtx_equal_p (XEXP (op0, 1), op2))
	      || (rtx_equal_p (XEXP (op0, 0), op2)
		  && rtx_equal_p (XEXP (op0, 1), op1))))
	return op2;

      if (COMPARISON_P (op0) && ! side_effects_p (op0))
	{
	  enum machine_mode cmp_mode = (GET_MODE (XEXP (op0, 0)) == VOIDmode
					? GET_MODE (XEXP (op0, 1))
					: GET_MODE (XEXP (op0, 0)));
	  rtx temp;

	  /* Look for happy constants in op1 and op2.  */
	  if (CONST_INT_P (op1) && CONST_INT_P (op2))
	    {
	      HOST_WIDE_INT t = INTVAL (op1);
	      HOST_WIDE_INT f = INTVAL (op2);

	      /* (cmp ? STORE_FLAG_VALUE : 0) is the comparison itself;
		 (cmp ? 0 : STORE_FLAG_VALUE) is its reverse.  */
	      if (t == STORE_FLAG_VALUE && f == 0)
		code = GET_CODE (op0);
	      else if (t == 0 && f == STORE_FLAG_VALUE)
		{
		  enum rtx_code tmp;
		  tmp = reversed_comparison_code (op0, NULL_RTX);
		  if (tmp == UNKNOWN)
		    break;
		  code = tmp;
		}
	      else
		break;

	      return simplify_gen_relational (code, mode, cmp_mode,
					      XEXP (op0, 0), XEXP (op0, 1));
	    }

	  if (cmp_mode == VOIDmode)
	    cmp_mode = op0_mode;
	  temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
						cmp_mode, XEXP (op0, 0),
						XEXP (op0, 1));

	  /* See if any simplifications were possible.  */
	  if (temp)
	    {
	      if (CONST_INT_P (temp))
		return temp == const0_rtx ? op2 : op1;
	      else if (temp)
		return gen_rtx_IF_THEN_ELSE (mode, temp, op1, op2);
	    }
	}
      break;

    case VEC_MERGE:
      gcc_assert (GET_MODE (op0) == mode);
      gcc_assert (GET_MODE (op1) == mode);
      gcc_assert (VECTOR_MODE_P (mode));
      op2 = avoid_constant_pool_reference (op2);
      if (CONST_INT_P (op2))
	{
	  int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
	  unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
	  int mask = (1 << n_elts) - 1;

	  /* An all-zero or all-one selector picks one operand whole.  */
	  if (!(INTVAL (op2) & mask))
	    return op1;
	  if ((INTVAL (op2) & mask) == mask)
	    return op0;

	  op0 = avoid_constant_pool_reference (op0);
	  op1 = avoid_constant_pool_reference (op1);
	  if (GET_CODE (op0) == CONST_VECTOR
	      && GET_CODE (op1) == CONST_VECTOR)
	    {
	      /* Merge two constant vectors element by element; selector
		 bit i set picks the element from op0, clear from op1.  */
	      rtvec v = rtvec_alloc (n_elts);
	      unsigned int i;

	      for (i = 0; i < n_elts; i++)
		RTVEC_ELT (v, i) = (INTVAL (op2) & (1 << i)
				    ? CONST_VECTOR_ELT (op0, i)
				    : CONST_VECTOR_ELT (op1, i));
	      return gen_rtx_CONST_VECTOR (mode, v);
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }

  return 0;
}
4755 | ||
14c931f1 CF |
4756 | /* Evaluate a SUBREG of a CONST_INT or CONST_DOUBLE or CONST_FIXED |
4757 | or CONST_VECTOR, | |
4758 | returning another CONST_INT or CONST_DOUBLE or CONST_FIXED or CONST_VECTOR. | |
eea50aa0 | 4759 | |
550d1387 GK |
4760 | Works by unpacking OP into a collection of 8-bit values |
4761 | represented as a little-endian array of 'unsigned char', selecting by BYTE, | |
4762 | and then repacking them again for OUTERMODE. */ | |
eea50aa0 | 4763 | |
550d1387 GK |
4764 | static rtx |
4765 | simplify_immed_subreg (enum machine_mode outermode, rtx op, | |
4766 | enum machine_mode innermode, unsigned int byte) | |
4767 | { | |
4768 | /* We support up to 512-bit values (for V8DFmode). */ | |
4769 | enum { | |
4770 | max_bitsize = 512, | |
4771 | value_bit = 8, | |
4772 | value_mask = (1 << value_bit) - 1 | |
4773 | }; | |
4774 | unsigned char value[max_bitsize / value_bit]; | |
4775 | int value_start; | |
4776 | int i; | |
4777 | int elem; | |
4778 | ||
4779 | int num_elem; | |
4780 | rtx * elems; | |
4781 | int elem_bitsize; | |
4782 | rtx result_s; | |
4783 | rtvec result_v = NULL; | |
4784 | enum mode_class outer_class; | |
4785 | enum machine_mode outer_submode; | |
4786 | ||
4787 | /* Some ports misuse CCmode. */ | |
481683e1 | 4788 | if (GET_MODE_CLASS (outermode) == MODE_CC && CONST_INT_P (op)) |
e5c56fd9 JH |
4789 | return op; |
4790 | ||
6e4b5aaf RH |
4791 | /* We have no way to represent a complex constant at the rtl level. */ |
4792 | if (COMPLEX_MODE_P (outermode)) | |
4793 | return NULL_RTX; | |
4794 | ||
550d1387 GK |
4795 | /* Unpack the value. */ |
4796 | ||
cb2a532e AH |
4797 | if (GET_CODE (op) == CONST_VECTOR) |
4798 | { | |
550d1387 GK |
4799 | num_elem = CONST_VECTOR_NUNITS (op); |
4800 | elems = &CONST_VECTOR_ELT (op, 0); | |
4801 | elem_bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (innermode)); | |
4802 | } | |
4803 | else | |
4804 | { | |
4805 | num_elem = 1; | |
4806 | elems = &op; | |
4807 | elem_bitsize = max_bitsize; | |
4808 | } | |
41374e13 NS |
4809 | /* If this asserts, it is too complicated; reducing value_bit may help. */ |
4810 | gcc_assert (BITS_PER_UNIT % value_bit == 0); | |
4811 | /* I don't know how to handle endianness of sub-units. */ | |
4812 | gcc_assert (elem_bitsize % BITS_PER_UNIT == 0); | |
550d1387 GK |
4813 | |
4814 | for (elem = 0; elem < num_elem; elem++) | |
4815 | { | |
4816 | unsigned char * vp; | |
4817 | rtx el = elems[elem]; | |
4818 | ||
4819 | /* Vectors are kept in target memory order. (This is probably | |
4820 | a mistake.) */ | |
4821 | { | |
4822 | unsigned byte = (elem * elem_bitsize) / BITS_PER_UNIT; | |
4823 | unsigned ibyte = (((num_elem - 1 - elem) * elem_bitsize) | |
4824 | / BITS_PER_UNIT); | |
4825 | unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte; | |
4826 | unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte; | |
4827 | unsigned bytele = (subword_byte % UNITS_PER_WORD | |
4828 | + (word_byte / UNITS_PER_WORD) * UNITS_PER_WORD); | |
4829 | vp = value + (bytele * BITS_PER_UNIT) / value_bit; | |
4830 | } | |
4831 | ||
4832 | switch (GET_CODE (el)) | |
34a80643 | 4833 | { |
550d1387 GK |
4834 | case CONST_INT: |
4835 | for (i = 0; | |
4836 | i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize; | |
4837 | i += value_bit) | |
4838 | *vp++ = INTVAL (el) >> i; | |
4839 | /* CONST_INTs are always logically sign-extended. */ | |
4840 | for (; i < elem_bitsize; i += value_bit) | |
4841 | *vp++ = INTVAL (el) < 0 ? -1 : 0; | |
4842 | break; | |
4843 | ||
4844 | case CONST_DOUBLE: | |
4845 | if (GET_MODE (el) == VOIDmode) | |
4846 | { | |
4847 | /* If this triggers, someone should have generated a | |
4848 | CONST_INT instead. */ | |
41374e13 | 4849 | gcc_assert (elem_bitsize > HOST_BITS_PER_WIDE_INT); |
cb2a532e | 4850 | |
550d1387 GK |
4851 | for (i = 0; i < HOST_BITS_PER_WIDE_INT; i += value_bit) |
4852 | *vp++ = CONST_DOUBLE_LOW (el) >> i; | |
4853 | while (i < HOST_BITS_PER_WIDE_INT * 2 && i < elem_bitsize) | |
4854 | { | |
8064d930 RE |
4855 | *vp++ |
4856 | = CONST_DOUBLE_HIGH (el) >> (i - HOST_BITS_PER_WIDE_INT); | |
550d1387 GK |
4857 | i += value_bit; |
4858 | } | |
4859 | /* It shouldn't matter what's done here, so fill it with | |
4860 | zero. */ | |
1125164c | 4861 | for (; i < elem_bitsize; i += value_bit) |
550d1387 GK |
4862 | *vp++ = 0; |
4863 | } | |
41374e13 | 4864 | else |
34a80643 | 4865 | { |
550d1387 GK |
4866 | long tmp[max_bitsize / 32]; |
4867 | int bitsize = GET_MODE_BITSIZE (GET_MODE (el)); | |
41374e13 | 4868 | |
3d8bf70f | 4869 | gcc_assert (SCALAR_FLOAT_MODE_P (GET_MODE (el))); |
41374e13 NS |
4870 | gcc_assert (bitsize <= elem_bitsize); |
4871 | gcc_assert (bitsize % value_bit == 0); | |
550d1387 GK |
4872 | |
4873 | real_to_target (tmp, CONST_DOUBLE_REAL_VALUE (el), | |
4874 | GET_MODE (el)); | |
4875 | ||
4876 | /* real_to_target produces its result in words affected by | |
4877 | FLOAT_WORDS_BIG_ENDIAN. However, we ignore this, | |
4878 | and use WORDS_BIG_ENDIAN instead; see the documentation | |
4879 | of SUBREG in rtl.texi. */ | |
4880 | for (i = 0; i < bitsize; i += value_bit) | |
226cfe61 | 4881 | { |
550d1387 GK |
4882 | int ibase; |
4883 | if (WORDS_BIG_ENDIAN) | |
4884 | ibase = bitsize - 1 - i; | |
4885 | else | |
4886 | ibase = i; | |
4887 | *vp++ = tmp[ibase / 32] >> i % 32; | |
226cfe61 | 4888 | } |
550d1387 GK |
4889 | |
4890 | /* It shouldn't matter what's done here, so fill it with | |
4891 | zero. */ | |
4892 | for (; i < elem_bitsize; i += value_bit) | |
4893 | *vp++ = 0; | |
34a80643 | 4894 | } |
550d1387 | 4895 | break; |
14c931f1 CF |
4896 | |
4897 | case CONST_FIXED: | |
4898 | if (elem_bitsize <= HOST_BITS_PER_WIDE_INT) | |
4899 | { | |
4900 | for (i = 0; i < elem_bitsize; i += value_bit) | |
4901 | *vp++ = CONST_FIXED_VALUE_LOW (el) >> i; | |
4902 | } | |
4903 | else | |
4904 | { | |
4905 | for (i = 0; i < HOST_BITS_PER_WIDE_INT; i += value_bit) | |
4906 | *vp++ = CONST_FIXED_VALUE_LOW (el) >> i; | |
4907 | for (; i < 2 * HOST_BITS_PER_WIDE_INT && i < elem_bitsize; | |
4908 | i += value_bit) | |
4909 | *vp++ = CONST_FIXED_VALUE_HIGH (el) | |
4910 | >> (i - HOST_BITS_PER_WIDE_INT); | |
4911 | for (; i < elem_bitsize; i += value_bit) | |
4912 | *vp++ = 0; | |
4913 | } | |
4914 | break; | |
550d1387 GK |
4915 | |
4916 | default: | |
41374e13 | 4917 | gcc_unreachable (); |
226cfe61 | 4918 | } |
cb2a532e AH |
4919 | } |
4920 | ||
550d1387 GK |
4921 | /* Now, pick the right byte to start with. */ |
4922 | /* Renumber BYTE so that the least-significant byte is byte 0. A special | |
4923 | case is paradoxical SUBREGs, which shouldn't be adjusted since they | |
4924 | will already have offset 0. */ | |
4925 | if (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode)) | |
eea50aa0 | 4926 | { |
550d1387 GK |
4927 | unsigned ibyte = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode) |
4928 | - byte); | |
4929 | unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte; | |
4930 | unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte; | |
4931 | byte = (subword_byte % UNITS_PER_WORD | |
4932 | + (word_byte / UNITS_PER_WORD) * UNITS_PER_WORD); | |
4933 | } | |
eea50aa0 | 4934 | |
550d1387 GK |
4935 | /* BYTE should still be inside OP. (Note that BYTE is unsigned, |
4936 | so if it's become negative it will instead be very large.) */ | |
41374e13 | 4937 | gcc_assert (byte < GET_MODE_SIZE (innermode)); |
3767c0fd | 4938 | |
550d1387 GK |
4939 | /* Convert from bytes to chunks of size value_bit. */ |
4940 | value_start = byte * (BITS_PER_UNIT / value_bit); | |
eea50aa0 | 4941 | |
550d1387 GK |
4942 | /* Re-pack the value. */ |
4943 | ||
4944 | if (VECTOR_MODE_P (outermode)) | |
4945 | { | |
4946 | num_elem = GET_MODE_NUNITS (outermode); | |
4947 | result_v = rtvec_alloc (num_elem); | |
4948 | elems = &RTVEC_ELT (result_v, 0); | |
4949 | outer_submode = GET_MODE_INNER (outermode); | |
4950 | } | |
4951 | else | |
4952 | { | |
4953 | num_elem = 1; | |
4954 | elems = &result_s; | |
4955 | outer_submode = outermode; | |
4956 | } | |
eea50aa0 | 4957 | |
550d1387 GK |
4958 | outer_class = GET_MODE_CLASS (outer_submode); |
4959 | elem_bitsize = GET_MODE_BITSIZE (outer_submode); | |
451f86fd | 4960 | |
41374e13 NS |
4961 | gcc_assert (elem_bitsize % value_bit == 0); |
4962 | gcc_assert (elem_bitsize + value_start * value_bit <= max_bitsize); | |
451f86fd | 4963 | |
550d1387 GK |
4964 | for (elem = 0; elem < num_elem; elem++) |
4965 | { | |
4966 | unsigned char *vp; | |
4967 | ||
4968 | /* Vectors are stored in target memory order. (This is probably | |
4969 | a mistake.) */ | |
4970 | { | |
4971 | unsigned byte = (elem * elem_bitsize) / BITS_PER_UNIT; | |
4972 | unsigned ibyte = (((num_elem - 1 - elem) * elem_bitsize) | |
4973 | / BITS_PER_UNIT); | |
4974 | unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte; | |
4975 | unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte; | |
4976 | unsigned bytele = (subword_byte % UNITS_PER_WORD | |
4977 | + (word_byte / UNITS_PER_WORD) * UNITS_PER_WORD); | |
4978 | vp = value + value_start + (bytele * BITS_PER_UNIT) / value_bit; | |
4979 | } | |
4980 | ||
4981 | switch (outer_class) | |
eea50aa0 | 4982 | { |
550d1387 GK |
4983 | case MODE_INT: |
4984 | case MODE_PARTIAL_INT: | |
4985 | { | |
4986 | unsigned HOST_WIDE_INT hi = 0, lo = 0; | |
4987 | ||
4988 | for (i = 0; | |
4989 | i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize; | |
4990 | i += value_bit) | |
4991 | lo |= (HOST_WIDE_INT)(*vp++ & value_mask) << i; | |
4992 | for (; i < elem_bitsize; i += value_bit) | |
4993 | hi |= ((HOST_WIDE_INT)(*vp++ & value_mask) | |
4994 | << (i - HOST_BITS_PER_WIDE_INT)); | |
4995 | ||
4996 | /* immed_double_const doesn't call trunc_int_for_mode. I don't | |
4997 | know why. */ | |
4998 | if (elem_bitsize <= HOST_BITS_PER_WIDE_INT) | |
4999 | elems[elem] = gen_int_mode (lo, outer_submode); | |
3242fbd8 | 5000 | else if (elem_bitsize <= 2 * HOST_BITS_PER_WIDE_INT) |
550d1387 | 5001 | elems[elem] = immed_double_const (lo, hi, outer_submode); |
3242fbd8 UB |
5002 | else |
5003 | return NULL_RTX; | |
550d1387 GK |
5004 | } |
5005 | break; | |
5006 | ||
5007 | case MODE_FLOAT: | |
15ed7b52 | 5008 | case MODE_DECIMAL_FLOAT: |
550d1387 GK |
5009 | { |
5010 | REAL_VALUE_TYPE r; | |
5011 | long tmp[max_bitsize / 32]; | |
5012 | ||
5013 | /* real_from_target wants its input in words affected by | |
5014 | FLOAT_WORDS_BIG_ENDIAN. However, we ignore this, | |
5015 | and use WORDS_BIG_ENDIAN instead; see the documentation | |
5016 | of SUBREG in rtl.texi. */ | |
5017 | for (i = 0; i < max_bitsize / 32; i++) | |
5018 | tmp[i] = 0; | |
5019 | for (i = 0; i < elem_bitsize; i += value_bit) | |
5020 | { | |
5021 | int ibase; | |
5022 | if (WORDS_BIG_ENDIAN) | |
5023 | ibase = elem_bitsize - 1 - i; | |
5024 | else | |
5025 | ibase = i; | |
effdb493 | 5026 | tmp[ibase / 32] |= (*vp++ & value_mask) << i % 32; |
550d1387 | 5027 | } |
eea50aa0 | 5028 | |
550d1387 GK |
5029 | real_from_target (&r, tmp, outer_submode); |
5030 | elems[elem] = CONST_DOUBLE_FROM_REAL_VALUE (r, outer_submode); | |
5031 | } | |
5032 | break; | |
14c931f1 CF |
5033 | |
5034 | case MODE_FRACT: | |
5035 | case MODE_UFRACT: | |
5036 | case MODE_ACCUM: | |
5037 | case MODE_UACCUM: | |
5038 | { | |
5039 | FIXED_VALUE_TYPE f; | |
5040 | f.data.low = 0; | |
5041 | f.data.high = 0; | |
5042 | f.mode = outer_submode; | |
5043 | ||
5044 | for (i = 0; | |
5045 | i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize; | |
5046 | i += value_bit) | |
5047 | f.data.low |= (HOST_WIDE_INT)(*vp++ & value_mask) << i; | |
5048 | for (; i < elem_bitsize; i += value_bit) | |
5049 | f.data.high |= ((HOST_WIDE_INT)(*vp++ & value_mask) | |
5050 | << (i - HOST_BITS_PER_WIDE_INT)); | |
5051 | ||
5052 | elems[elem] = CONST_FIXED_FROM_FIXED_VALUE (f, outer_submode); | |
5053 | } | |
5054 | break; | |
550d1387 GK |
5055 | |
5056 | default: | |
41374e13 | 5057 | gcc_unreachable (); |
550d1387 GK |
5058 | } |
5059 | } | |
5060 | if (VECTOR_MODE_P (outermode)) | |
5061 | return gen_rtx_CONST_VECTOR (outermode, result_v); | |
5062 | else | |
5063 | return result_s; | |
5064 | } | |
eea50aa0 | 5065 | |
/* Simplify SUBREG:OUTERMODE(OP:INNERMODE, BYTE).

   OUTERMODE is the mode of the requested subreg, OP the operand being
   accessed, INNERMODE the mode OP is (or would be) in, and BYTE the
   byte offset of the subreg as if the value were stored in memory.

   Return the simplified rtx, or 0 (NULL_RTX) if no simplifications
   are possible.  */
rtx
simplify_subreg (enum machine_mode outermode, rtx op,
		 enum machine_mode innermode, unsigned int byte)
{
  /* Little bit of sanity checking.  Scalar/vector modes only, the
     offset must be aligned to the outer mode and lie inside OP.  */
  gcc_assert (innermode != VOIDmode);
  gcc_assert (outermode != VOIDmode);
  gcc_assert (innermode != BLKmode);
  gcc_assert (outermode != BLKmode);

  gcc_assert (GET_MODE (op) == innermode
	      || GET_MODE (op) == VOIDmode);

  gcc_assert ((byte % GET_MODE_SIZE (outermode)) == 0);
  gcc_assert (byte < GET_MODE_SIZE (innermode));

  /* Identity subreg: nothing to do.  */
  if (outermode == innermode && !byte)
    return op;

  /* Constants are re-packed bitwise by simplify_immed_subreg.  */
  if (CONST_INT_P (op)
      || GET_CODE (op) == CONST_DOUBLE
      || GET_CODE (op) == CONST_FIXED
      || GET_CODE (op) == CONST_VECTOR)
    return simplify_immed_subreg (outermode, op, innermode, byte);

  /* Changing mode twice with SUBREG => just change it once,
     or not at all if changing back op starting mode.  */
  if (GET_CODE (op) == SUBREG)
    {
      enum machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
      int final_offset = byte + SUBREG_BYTE (op);
      rtx newx;

      if (outermode == innermostmode
	  && byte == 0 && SUBREG_BYTE (op) == 0)
	return SUBREG_REG (op);

      /* The SUBREG_BYTE represents offset, as if the value were stored
	 in memory.  Irritating exception is paradoxical subreg, where
	 we define SUBREG_BYTE to be 0.  On big endian machines, this
	 value should be negative.  For a moment, undo this exception.  */
      if (byte == 0 && GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
	{
	  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
	  if (WORDS_BIG_ENDIAN)
	    final_offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    final_offset += difference % UNITS_PER_WORD;
	}
      /* Same correction for the inner subreg if it is paradoxical too.  */
      if (SUBREG_BYTE (op) == 0
	  && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
	{
	  int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
	  if (WORDS_BIG_ENDIAN)
	    final_offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    final_offset += difference % UNITS_PER_WORD;
	}

      /* See whether resulting subreg will be paradoxical.  */
      if (GET_MODE_SIZE (innermostmode) > GET_MODE_SIZE (outermode))
	{
	  /* In nonparadoxical subregs we can't handle negative offsets.  */
	  if (final_offset < 0)
	    return NULL_RTX;
	  /* Bail out in case resulting subreg would be incorrect.  */
	  if (final_offset % GET_MODE_SIZE (outermode)
	      || (unsigned) final_offset >= GET_MODE_SIZE (innermostmode))
	    return NULL_RTX;
	}
      else
	{
	  int offset = 0;
	  int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (outermode));

	  /* In paradoxical subreg, see if we are still looking on lower part.
	     If so, our SUBREG_BYTE will be 0.  */
	  if (WORDS_BIG_ENDIAN)
	    offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += difference % UNITS_PER_WORD;
	  if (offset == final_offset)
	    final_offset = 0;
	  else
	    return NULL_RTX;
	}

      /* Recurse for further possible simplifications.  */
      newx = simplify_subreg (outermode, SUBREG_REG (op), innermostmode,
			      final_offset);
      if (newx)
	return newx;
      if (validate_subreg (outermode, innermostmode,
			   SUBREG_REG (op), final_offset))
	{
	  newx = gen_rtx_SUBREG (outermode, SUBREG_REG (op), final_offset);
	  /* Carry over promotion information when the collapsed subreg is
	     still a lowpart of the promoted value and its size lies between
	     the two inner modes' sizes.  */
	  if (SUBREG_PROMOTED_VAR_P (op)
	      && SUBREG_PROMOTED_UNSIGNED_P (op) >= 0
	      && GET_MODE_CLASS (outermode) == MODE_INT
	      && IN_RANGE (GET_MODE_SIZE (outermode),
			   GET_MODE_SIZE (innermode),
			   GET_MODE_SIZE (innermostmode))
	      && subreg_lowpart_p (newx))
	    {
	      SUBREG_PROMOTED_VAR_P (newx) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET
		(newx, SUBREG_PROMOTED_UNSIGNED_P (op));
	    }
	  return newx;
	}
      return NULL_RTX;
    }

  /* Merge implicit and explicit truncations.  */

  if (GET_CODE (op) == TRUNCATE
      && GET_MODE_SIZE (outermode) < GET_MODE_SIZE (innermode)
      && subreg_lowpart_offset (outermode, innermode) == byte)
    return simplify_gen_unary (TRUNCATE, outermode, XEXP (op, 0),
			       GET_MODE (XEXP (op, 0)));

  /* SUBREG of a hard register => just change the register number
     and/or mode.  If the hard register is not valid in that mode,
     suppress this simplification.  If the hard register is the stack,
     frame, or argument pointer, leave this as a SUBREG.  */

  if (REG_P (op) && HARD_REGISTER_P (op))
    {
      unsigned int regno, final_regno;

      regno = REGNO (op);
      final_regno = simplify_subreg_regno (regno, innermode, byte, outermode);
      if (HARD_REGISTER_NUM_P (final_regno))
	{
	  rtx x;
	  int final_offset = byte;

	  /* Adjust offset for paradoxical subregs.  */
	  if (byte == 0
	      && GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
	    {
	      int difference = (GET_MODE_SIZE (innermode)
				- GET_MODE_SIZE (outermode));
	      if (WORDS_BIG_ENDIAN)
		final_offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
	      if (BYTES_BIG_ENDIAN)
		final_offset += difference % UNITS_PER_WORD;
	    }

	  x = gen_rtx_REG_offset (op, outermode, final_regno, final_offset);

	  /* Propagate original regno.  We don't have any way to specify
	     the offset inside original regno, so do so only for lowpart.
	     The information is used only by alias analysis that can not
	     grok partial registers anyway.  */

	  if (subreg_lowpart_offset (outermode, innermode) == byte)
	    ORIGINAL_REGNO (x) = ORIGINAL_REGNO (op);
	  return x;
	}
    }

  /* If we have a SUBREG of a register that we are replacing and we are
     replacing it with a MEM, make a new MEM and try replacing the
     SUBREG with it.  Don't do this if the MEM has a mode-dependent address
     or if we would be widening it.  */

  if (MEM_P (op)
      && ! mode_dependent_address_p (XEXP (op, 0))
      /* Allow splitting of volatile memory references in case we don't
         have instruction to move the whole thing.  */
      && (! MEM_VOLATILE_P (op)
	  || ! have_insn_for (SET, innermode))
      && GET_MODE_SIZE (outermode) <= GET_MODE_SIZE (GET_MODE (op)))
    return adjust_address_nv (op, outermode, byte);

  /* Handle complex values represented as CONCAT
     of real and imaginary part.  */
  if (GET_CODE (op) == CONCAT)
    {
      unsigned int part_size, final_offset;
      rtx part, res;

      /* Pick the half of the CONCAT the requested bytes fall into.  */
      part_size = GET_MODE_UNIT_SIZE (GET_MODE (XEXP (op, 0)));
      if (byte < part_size)
	{
	  part = XEXP (op, 0);
	  final_offset = byte;
	}
      else
	{
	  part = XEXP (op, 1);
	  final_offset = byte - part_size;
	}

      /* A subreg straddling both halves cannot be simplified here.  */
      if (final_offset + GET_MODE_SIZE (outermode) > part_size)
	return NULL_RTX;

      res = simplify_subreg (outermode, part, GET_MODE (part), final_offset);
      if (res)
	return res;
      if (validate_subreg (outermode, GET_MODE (part), part, final_offset))
	return gen_rtx_SUBREG (outermode, part, final_offset);
      return NULL_RTX;
    }

  /* Optimize SUBREG truncations of zero and sign extended values.  */
  if ((GET_CODE (op) == ZERO_EXTEND
       || GET_CODE (op) == SIGN_EXTEND)
      && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode))
    {
      unsigned int bitpos = subreg_lsb_1 (outermode, innermode, byte);

      /* If we're requesting the lowpart of a zero or sign extension,
	 there are three possibilities.  If the outermode is the same
	 as the origmode, we can omit both the extension and the subreg.
	 If the outermode is not larger than the origmode, we can apply
	 the truncation without the extension.  Finally, if the outermode
	 is larger than the origmode, but both are integer modes, we
	 can just extend to the appropriate mode.  */
      if (bitpos == 0)
	{
	  enum machine_mode origmode = GET_MODE (XEXP (op, 0));
	  if (outermode == origmode)
	    return XEXP (op, 0);
	  if (GET_MODE_BITSIZE (outermode) <= GET_MODE_BITSIZE (origmode))
	    return simplify_gen_subreg (outermode, XEXP (op, 0), origmode,
					subreg_lowpart_offset (outermode,
							       origmode));
	  if (SCALAR_INT_MODE_P (outermode))
	    return simplify_gen_unary (GET_CODE (op), outermode,
				       XEXP (op, 0), origmode);
	}

      /* A SUBREG resulting from a zero extension may fold to zero if
	 it extracts higher bits than the ZERO_EXTEND's source bits.  */
      if (GET_CODE (op) == ZERO_EXTEND
	  && bitpos >= GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0))))
	return CONST0_RTX (outermode);
    }

  /* Simplify (subreg:QI (lshiftrt:SI (sign_extend:SI (x:QI)) C), 0)
     into (ashiftrt:QI (x:QI) C), where C is a suitable small constant and
     the outer subreg is effectively a truncation to the original mode.  */
  if ((GET_CODE (op) == LSHIFTRT
       || GET_CODE (op) == ASHIFTRT)
      && SCALAR_INT_MODE_P (outermode)
      /* Ensure that OUTERMODE is at least twice as wide as the INNERMODE
	 to avoid the possibility that an outer LSHIFTRT shifts by more
	 than the sign extension's sign_bit_copies and introduces zeros
	 into the high bits of the result.  */
      && (2 * GET_MODE_BITSIZE (outermode)) <= GET_MODE_BITSIZE (innermode)
      && CONST_INT_P (XEXP (op, 1))
      && GET_CODE (XEXP (op, 0)) == SIGN_EXTEND
      && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
      && subreg_lsb_1 (outermode, innermode, byte) == 0)
    return simplify_gen_binary (ASHIFTRT, outermode,
				XEXP (XEXP (op, 0), 0), XEXP (op, 1));

  /* Likewise (subreg:QI (lshiftrt:SI (zero_extend:SI (x:QI)) C), 0)
     into (lshiftrt:QI (x:QI) C), where C is a suitable small constant and
     the outer subreg is effectively a truncation to the original mode.  */
  if ((GET_CODE (op) == LSHIFTRT
       || GET_CODE (op) == ASHIFTRT)
      && SCALAR_INT_MODE_P (outermode)
      && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)
      && CONST_INT_P (XEXP (op, 1))
      && GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
      && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
      && subreg_lsb_1 (outermode, innermode, byte) == 0)
    return simplify_gen_binary (LSHIFTRT, outermode,
				XEXP (XEXP (op, 0), 0), XEXP (op, 1));

  /* Likewise (subreg:QI (ashift:SI (zero_extend:SI (x:QI)) C), 0)
     into (ashift:QI (x:QI) C), where C is a suitable small constant and
     the outer subreg is effectively a truncation to the original mode.  */
  if (GET_CODE (op) == ASHIFT
      && SCALAR_INT_MODE_P (outermode)
      && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)
      && CONST_INT_P (XEXP (op, 1))
      && (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
	  || GET_CODE (XEXP (op, 0)) == SIGN_EXTEND)
      && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
      && subreg_lsb_1 (outermode, innermode, byte) == 0)
    return simplify_gen_binary (ASHIFT, outermode,
				XEXP (XEXP (op, 0), 0), XEXP (op, 1));

  /* Recognize a word extraction from a multi-word subreg.  */
  if ((GET_CODE (op) == LSHIFTRT
       || GET_CODE (op) == ASHIFTRT)
      && SCALAR_INT_MODE_P (outermode)
      && GET_MODE_BITSIZE (outermode) >= BITS_PER_WORD
      && GET_MODE_BITSIZE (innermode) >= (2 * GET_MODE_BITSIZE (outermode))
      && CONST_INT_P (XEXP (op, 1))
      && (INTVAL (XEXP (op, 1)) & (GET_MODE_BITSIZE (outermode) - 1)) == 0
      && INTVAL (XEXP (op, 1)) >= 0
      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (innermode)
      && byte == subreg_lowpart_offset (outermode, innermode))
    {
      int shifted_bytes = INTVAL (XEXP (op, 1)) / BITS_PER_UNIT;
      return simplify_gen_subreg (outermode, XEXP (op, 0), innermode,
				  (WORDS_BIG_ENDIAN
				   ? byte - shifted_bytes
				   : byte + shifted_bytes));
    }

  return NULL_RTX;
}
550d1387 | 5379 | |
949c5d62 JH |
5380 | /* Make a SUBREG operation or equivalent if it folds. */ |
5381 | ||
5382 | rtx | |
46c5ad27 AJ |
5383 | simplify_gen_subreg (enum machine_mode outermode, rtx op, |
5384 | enum machine_mode innermode, unsigned int byte) | |
949c5d62 | 5385 | { |
53ed1a12 | 5386 | rtx newx; |
949c5d62 | 5387 | |
53ed1a12 BI |
5388 | newx = simplify_subreg (outermode, op, innermode, byte); |
5389 | if (newx) | |
5390 | return newx; | |
949c5d62 | 5391 | |
4f1da2e9 RS |
5392 | if (GET_CODE (op) == SUBREG |
5393 | || GET_CODE (op) == CONCAT | |
5394 | || GET_MODE (op) == VOIDmode) | |
949c5d62 JH |
5395 | return NULL_RTX; |
5396 | ||
beb72684 RH |
5397 | if (validate_subreg (outermode, innermode, op, byte)) |
5398 | return gen_rtx_SUBREG (outermode, op, byte); | |
5399 | ||
5400 | return NULL_RTX; | |
949c5d62 | 5401 | } |
beb72684 | 5402 | |
0cedb36c JL |
5403 | /* Simplify X, an rtx expression. |
5404 | ||
5405 | Return the simplified expression or NULL if no simplifications | |
5406 | were possible. | |
5407 | ||
5408 | This is the preferred entry point into the simplification routines; | |
5409 | however, we still allow passes to call the more specific routines. | |
5410 | ||
14b493d6 | 5411 | Right now GCC has three (yes, three) major bodies of RTL simplification |
0cedb36c JL |
5412 | code that need to be unified. |
5413 | ||
5414 | 1. fold_rtx in cse.c. This code uses various CSE specific | |
5415 | information to aid in RTL simplification. | |
5416 | ||
5417 | 2. simplify_rtx in combine.c. Similar to fold_rtx, except that | |
5418 | it uses combine specific information to aid in RTL | |
5419 | simplification. | |
5420 | ||
5421 | 3. The routines in this file. | |
5422 | ||
5423 | ||
5424 | Long term we want to only have one body of simplification code; to | |
5425 | get to that state I recommend the following steps: | |
5426 | ||
5427 | 1. Pour over fold_rtx & simplify_rtx and move any simplifications | |
5428 | which are not pass dependent state into these routines. | |
5429 | ||
5430 | 2. As code is moved by #1, change fold_rtx & simplify_rtx to | |
5431 | use this routine whenever possible. | |
5432 | ||
5433 | 3. Allow for pass dependent state to be provided to these | |
5434 | routines and add simplifications based on the pass dependent | |
5435 | state. Remove code from cse.c & combine.c that becomes | |
5436 | redundant/dead. | |
5437 | ||
5438 | It will take time, but ultimately the compiler will be easier to | |
5439 | maintain and improve. It's totally silly that when we add a | |
5440 | simplification that it needs to be added to 4 places (3 for RTL | |
5441 | simplification and 1 for tree simplification. */ | |
786de7eb | 5442 | |
0cedb36c | 5443 | rtx |
58f9752a | 5444 | simplify_rtx (const_rtx x) |
0cedb36c | 5445 | { |
58f9752a KG |
5446 | const enum rtx_code code = GET_CODE (x); |
5447 | const enum machine_mode mode = GET_MODE (x); | |
0cedb36c JL |
5448 | |
5449 | switch (GET_RTX_CLASS (code)) | |
5450 | { | |
ec8e098d | 5451 | case RTX_UNARY: |
0cedb36c JL |
5452 | return simplify_unary_operation (code, mode, |
5453 | XEXP (x, 0), GET_MODE (XEXP (x, 0))); | |
ec8e098d | 5454 | case RTX_COMM_ARITH: |
df0afdbe | 5455 | if (swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1))) |
cf6bcbd0 | 5456 | return simplify_gen_binary (code, mode, XEXP (x, 1), XEXP (x, 0)); |
b42abad8 | 5457 | |
2b72593e | 5458 | /* Fall through.... */ |
b42abad8 | 5459 | |
ec8e098d | 5460 | case RTX_BIN_ARITH: |
0cedb36c JL |
5461 | return simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1)); |
5462 | ||
ec8e098d PB |
5463 | case RTX_TERNARY: |
5464 | case RTX_BITFIELD_OPS: | |
0cedb36c | 5465 | return simplify_ternary_operation (code, mode, GET_MODE (XEXP (x, 0)), |
d9c695ff RK |
5466 | XEXP (x, 0), XEXP (x, 1), |
5467 | XEXP (x, 2)); | |
0cedb36c | 5468 | |
ec8e098d PB |
5469 | case RTX_COMPARE: |
5470 | case RTX_COMM_COMPARE: | |
c6fb08ad PB |
5471 | return simplify_relational_operation (code, mode, |
5472 | ((GET_MODE (XEXP (x, 0)) | |
5473 | != VOIDmode) | |
5474 | ? GET_MODE (XEXP (x, 0)) | |
5475 | : GET_MODE (XEXP (x, 1))), | |
5476 | XEXP (x, 0), | |
5477 | XEXP (x, 1)); | |
d41ba56f | 5478 | |
ec8e098d | 5479 | case RTX_EXTRA: |
949c5d62 | 5480 | if (code == SUBREG) |
e2561558 RS |
5481 | return simplify_subreg (mode, SUBREG_REG (x), |
5482 | GET_MODE (SUBREG_REG (x)), | |
5483 | SUBREG_BYTE (x)); | |
d41ba56f RS |
5484 | break; |
5485 | ||
ec8e098d | 5486 | case RTX_OBJ: |
d41ba56f RS |
5487 | if (code == LO_SUM) |
5488 | { | |
5489 | /* Convert (lo_sum (high FOO) FOO) to FOO. */ | |
5490 | if (GET_CODE (XEXP (x, 0)) == HIGH | |
5491 | && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))) | |
5492 | return XEXP (x, 1); | |
5493 | } | |
5494 | break; | |
5495 | ||
0cedb36c | 5496 | default: |
d41ba56f | 5497 | break; |
0cedb36c | 5498 | } |
d41ba56f | 5499 | return NULL; |
0cedb36c | 5500 | } |