/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four-level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

        UNINITIALIZED  ->  the initial state of the value.  This value
                           is replaced with a correct initial value
                           the first time the value is used, so the
                           rest of the pass does not need to care about
                           it.  Using this value simplifies initialization
                           of the pass, and prevents us from needlessly
                           scanning statements that are never reached.

        UNDEFINED      ->  V_i is a local variable whose definition
                           has not been processed yet.  Therefore we
                           don't yet know if its value is a constant
                           or not.

        CONSTANT       ->  V_i has been found to hold a constant
                           value C.

        VARYING        ->  V_i cannot take a constant value, or if it
                           does, it is not possible to determine it
                           at compile time.
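   For instance, simulating the fragment

        x_1 = 4;
        y_2 = x_1 + 6;

   starts both names at UNDEFINED, since their defining statements
   have not been visited yet.  Visiting the first statement raises
   x_1 to CONSTANT 4; visiting the second folds 4 + 6 and raises y_2
   to CONSTANT 10.  Had x_1 been defined by, say, a load through a
   pointer, it would have been VARYING and y_2 would have become
   VARYING as well.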

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main shortcuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

                if (PRED)
                  a_9 = 3;
                else
                  a_10 = 100;
                a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.


   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

          struct A a;
          const int B = 42;

          void foo (int i)
          {
            if (i > 10)
              a.a = 42;
            else
              {
                a.b = 21;
                a.a = a.b + 21;
              }

            if (a.a != B)
              never_executed ();
          }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values with the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also group partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

          # a_5 = VDEF <a_4>
          a.a = 2;

          # VUSE <a_5>
          x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance, global variables:

        int A;

        foo (int i)
        {
          if (i_3 > 10)
            A_4 = 3;
          # A_5 = PHI (A_4, A_2);

          # VUSE <A_5>
          A.0_6 = A;

          return A.0_6;
        }

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}


/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym)
      && !MTAG_P (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          STRIP_USELESS_TYPE_CONVERSION (val);
          if (is_gimple_min_invariant (val))
            return val;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && !DECL_EXTERNAL (sym)
          && targetm.binds_local_p (sym)
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
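
/* As an illustrative sketch of the rules above: for

     static const int answer = 42;

   get_symbol_constant_value returns the INTEGER_CST 42, while for an
   uninitialized, non-external 'static const int zero;' that binds
   locally it returns a zero of the variable's type.  Symbols that are
   not known constants yield NULL_TREE.  */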

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  tree cst_val;

  if (!is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
         non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
         initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (var);

      if (gimple_nop_p (stmt))
        {
          /* Variables defined by an empty statement are those used
             before being initialized.  If VAR is a local variable, we
             can assume initially that it is UNDEFINED, otherwise we must
             consider it VARYING.  */
          if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
            val.lattice_val = UNDEFINED;
          else
            val.lattice_val = VARYING;
        }
      else if (is_gimple_assign (stmt)
               /* Value-returning GIMPLE_CALL statements assign to
                  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
               || (is_gimple_call (stmt)
                   && gimple_call_lhs (stmt) != NULL_TREE)
               || gimple_code (stmt) == GIMPLE_PHI)
        {
          /* Any other variable defined by an assignment or a PHI node
             is considered UNDEFINED.  */
          val.lattice_val = UNDEFINED;
        }
      else
        {
          /* Otherwise, VAR will never take on a constant value.  */
          val.lattice_val = VARYING;
        }
    }

  return val;
}
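
/* Illustration of the defaults: an SSA name for a read-only static
   with a known initializer starts at CONSTANT; the default definition
   of a local register variable (a use before any store) starts at
   UNDEFINED; a PARM_DECL's default definition, or a name defined by
   e.g. a GIMPLE_ASM, starts at VARYING.  */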

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
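
/* For example, when compiling with -ffast-math (so neither NaNs nor
   signed zeros are honored), a lattice value of CONSTANT -0.0 is
   canonicalized to CONSTANT 0.0, and a lattice value of CONSTANT NaN
   is demoted to UNDEFINED.  */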

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
              || (old_val->lattice_val == new_val.lattice_val
                  && ((!old_val->value && !new_val.value)
                      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}


/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
     is_gimple_min_invariant, so we do not consider calls or
     other forms of assignment.  */
  if (gimple_assign_single_p (stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
    return CONSTANT;

  if (code == GIMPLE_COND
      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
    return CONSTANT;

  if (code == GIMPLE_SWITCH
      && is_gimple_min_invariant (gimple_switch_index (stmt)))
    return CONSTANT;

  /* Arrive here for more complex cases.  */

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
         memory may have only VARYING virtual operands, but still be
         constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}
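
/* An illustrative reading of the rules above: for 'z_5 = x_3 + y_4'
   with x_3 at CONSTANT 4 and y_4 at UNDEFINED, the PLUS_EXPR case
   returns UNDEFINED; with y_4 at VARYING instead, the constant
   operand makes the statement likely CONSTANT, so it is worth a
   folding attempt.  */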

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return true;

  /* If it is a call that does not return a value, or a direct call
     to a function that is not a builtin, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !DECL_BUILT_IN (fndecl)))
        return true;
    }

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                {
                  if (is_varying)
                    set_value_varying (def);
                }
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!is_gimple_reg (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}


/* Do final substitution of propagated values, clean up the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

                any  M UNDEFINED   = any
                any  M VARYING     = VARYING
                Ci   M Cj          = Ci       if (i == j)
                Ci   M Cj          = VARYING  if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci           if (i == j)
         Ci M Cj = VARYING      if (i != j)

         If these two values come from memory stores, make sure that
         they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
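
/* A few concrete meets: UNDEFINED M CONSTANT 4 = CONSTANT 4;
   CONSTANT 4 M CONSTANT 4 = CONSTANT 4; CONSTANT 4 M CONSTANT 5 =
   VARYING; CONSTANT 4 M VARYING = VARYING.  */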

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val;

          if (is_gimple_min_invariant (arg))
            {
              arg_val.lattice_val = CONSTANT;
              arg_val.value = arg;
            }
          else
            arg_val = *(get_value (arg));

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
              && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
          || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
                                      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
                                     TREE_TYPE (addr))
          && useless_type_conversion_p (TREE_TYPE (deref),
                                        TREE_TYPE (TREE_OPERAND (addr, 0))));
}
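
/* For instance, given 'int x;' and a pointer known to be '&x', the
   dereference '*(&x)' may be cancelled to plain 'x': the address is
   invariant and both sides have type 'int'.  Propagation is refused
   when the pointed-to object has incomplete type or when the implied
   conversions are not useless.  */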

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return get_value (rhs)->value;
                }
              /* Handle propagating invariant addresses into address operations.
                 The folding we do here matches that in tree-ssa-forwprop.c.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  tree *base;
                  base = &TREE_OPERAND (rhs, 0);
                  while (handled_component_p (*base))
                    base = &TREE_OPERAND (*base, 0);
                  if (TREE_CODE (*base) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
                      if (val->lattice_val == CONSTANT
                          && TREE_CODE (val->value) == ADDR_EXPR
                          && may_propagate_address_into_dereference
                               (val->value, *base))
                        {
                          /* We need to return a new tree, not modify the IL
                             or share parts of it.  So play some tricks to
                             avoid manually building it.  */
                          tree ret, save = *base;
                          *base = TREE_OPERAND (val->value, 0);
                          ret = unshare_expr (rhs);
                          recompute_tree_invariant_for_addr_expr (ret);
                          *base = save;
                          return ret;
                        }
                    }
                }

              if (kind == tcc_reference)
                {
                  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
                      if (val->lattice_val == CONSTANT)
                        return fold_unary (VIEW_CONVERT_EXPR,
                                           TREE_TYPE (rhs), val->value);
                    }
                  return fold_const_aggregate_ref (rhs);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = gimple_assign_rhs1 (stmt);

              /* Simplify the operand down to a constant.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = get_value (op0)->value;
                }

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for pointer type conversions
                 do not apply here.  Substitution later will only substitute to
                 allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  /* Do not allow differences in volatile qualification
                     as this might get us confused as to whether a
                     propagation destination statement is volatile
                     or not.  See PR36988.  */
                  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
                      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
                {
                  tree tem;
                  /* Still try to generate a constant of correct type.  */
                  if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                  TREE_TYPE (op0))
                      && ((tem = maybe_fold_offset_to_address
                                   (op0, integer_zero_node, TREE_TYPE (lhs)))
                          != NULL_TREE))
                    return tem;
                  return op0;
                }

              return fold_unary_ignore_overflow (subcode,
                                                 gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = gimple_assign_rhs1 (stmt);
              tree op1 = gimple_assign_rhs2 (stmt);

              /* Simplify the operands down to constants when appropriate.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = val->value;
                }

              if (TREE_CODE (op1) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op1);
                  if (val->lattice_val == CONSTANT)
                    op1 = val->value;
                }

              /* Fold &foo + CST into an invariant reference if possible.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  tree tem = maybe_fold_offset_to_address (op0, op1,
                                                           TREE_TYPE (lhs));
                  if (tem != NULL_TREE)
                    return tem;
                }

              return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
            }

          default:
            gcc_unreachable ();
          }
      }
      break;

    case GIMPLE_CALL:
      {
        tree fn = gimple_call_fn (stmt);
        prop_value_t *val;

        if (TREE_CODE (fn) == SSA_NAME)
          {
            val = get_value (fn);
            if (val->lattice_val == CONSTANT)
              fn = val->value;
          }
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              {
                args[i] = gimple_call_arg (stmt, i);
                if (TREE_CODE (args[i]) == SSA_NAME)
                  {
                    val = get_value (args[i]);
                    if (val->lattice_val == CONSTANT)
                      args[i] = val->value;
                  }
              }
            call = build_call_array (gimple_call_return_type (stmt),
                                     fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = gimple_cond_lhs (stmt);
        tree op1 = gimple_cond_rhs (stmt);
        enum tree_code code = gimple_cond_code (stmt);

        /* Simplify the operands down to constants when appropriate.  */
        if (TREE_CODE (op0) == SSA_NAME)
          {
            prop_value_t *val = get_value (op0);
            if (val->lattice_val == CONSTANT)
              op0 = val->value;
          }

        if (TREE_CODE (op1) == SSA_NAME)
          {
            prop_value_t *val = get_value (op1);
            if (val->lattice_val == CONSTANT)
              op1 = val->value;
          }

        return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        tree rhs = gimple_switch_index (stmt);

        if (TREE_CODE (rhs) == SSA_NAME)
          {
            /* If the RHS is an SSA_NAME, return its known constant value,
               if any.  */
            return get_value (rhs)->value;
          }

        return rhs;
      }

    default:
      gcc_unreachable ();
    }
}


/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        case STRING_CST:
        case CONSTRUCTOR:
          ctor = base;
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || (TREE_CODE (ctor) != CONSTRUCTOR
              && TREE_CODE (ctor) != STRING_CST)
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
         with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
        {
        case SSA_NAME:
          if ((value = get_value (idx))
              && value->lattice_val == CONSTANT
              && TREE_CODE (value->value) == INTEGER_CST)
            idx = value->value;
          else
            return NULL_TREE;
          break;

        case INTEGER_CST:
          break;

        default:
          return NULL_TREE;
        }

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
        {
          if ((TYPE_MODE (TREE_TYPE (t))
               == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
              && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
                  == MODE_INT)
              && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
              && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
            return build_int_cst_type (TREE_TYPE (t),
                                       (TREE_STRING_POINTER (ctor)
                                        [TREE_INT_CST_LOW (idx)]));
          return NULL_TREE;
        }

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (tree_int_cst_equal (cfield, idx))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            return cval;
          }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || TREE_CODE (ctor) != CONSTRUCTOR
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (cfield == field
            /* FIXME: Handle bit-fields.  */
            && ! DECL_BIT_FIELD (cfield))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            return cval;
          }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    case INDIRECT_REF:
      {
        tree base = TREE_OPERAND (t, 0);
        if (TREE_CODE (base) == SSA_NAME
            && (value = get_value (base))
            && value->lattice_val == CONSTANT
            && TREE_CODE (value->value) == ADDR_EXPR)
          return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
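
/* For example, given

     static const int primes[4] = { 2, 3, 5, 7 };

   fold_const_aggregate_ref folds the ARRAY_REF 'primes[2]' to the
   INTEGER_CST 5, and a one-byte read such as "abc"[1] folds to 'b'.
   An SSA_NAME index is first resolved through the lattice, so
   'primes[i_7]' folds too once i_7 is known to be CONSTANT.  */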

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);

          /* Other cases cannot satisfy is_gimple_min_invariant
             without folding.  */
          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
            simplified = gimple_assign_rhs1 (stmt);
        }
      else if (code == GIMPLE_SWITCH)
        simplified = gimple_switch_index (stmt);
      else
        /* These cannot satisfy is_gimple_min_invariant without folding.  */
        gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
        {
        case CONSTANT:
          fprintf (dump_file, "CONSTANT");
          break;
        case UNDEFINED:
          fprintf (dump_file, "UNDEFINED");
          break;
        case VARYING:
          fprintf (dump_file, "VARYING");
          break;
        default:;
        }
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had UNDEFINED operands, then the result of the statement
         should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
        val.lattice_val = likelyvalue;
      else
        val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
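
/* A sample trace: for 'y_2 = x_1 * 2' with x_1 at CONSTANT 21,
   likely_value returns CONSTANT, ccp_fold builds the INTEGER_CST 42
   and evaluate_stmt returns { CONSTANT, 42 }.  If folding fails, the
   result is UNDEFINED when likely_value said so, and VARYING
   otherwise.  */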
1386 | ||
750628d8 | 1387 | /* Visit the assignment statement STMT. Set the value of its LHS to the |
0bca51f0 DN |
1388 | value computed by the RHS and store LHS in *OUTPUT_P. If STMT |
1389 | creates virtual definitions, set the value of each new name to that | |
726a989a RB |
1390 | of the RHS (if we can derive a constant out of the RHS). |
1391 | Value-returning call statements also perform an assignment, and | |
1392 | are handled here. */ | |
6de9cd9a | 1393 | |
750628d8 | 1394 | static enum ssa_prop_result |
726a989a | 1395 | visit_assignment (gimple stmt, tree *output_p) |
6de9cd9a | 1396 | { |
0bca51f0 | 1397 | prop_value_t val; |
0bca51f0 | 1398 | enum ssa_prop_result retval; |
6de9cd9a | 1399 | |
726a989a | 1400 | tree lhs = gimple_get_lhs (stmt); |
6de9cd9a | 1401 | |
726a989a RB |
1402 | gcc_assert (gimple_code (stmt) != GIMPLE_CALL |
1403 | || gimple_call_lhs (stmt) != NULL_TREE); | |
1404 | ||
1405 | if (gimple_assign_copy_p (stmt)) | |
750628d8 | 1406 | { |
726a989a | 1407 | tree rhs = gimple_assign_rhs1 (stmt); |
0bca51f0 | 1408 | |
726a989a RB |
1409 | if (TREE_CODE (rhs) == SSA_NAME) |
1410 | { | |
1411 | /* For a simple copy operation, we copy the lattice values. */ | |
1412 | prop_value_t *nval = get_value (rhs); | |
1413 | val = *nval; | |
1414 | } | |
0bca51f0 | 1415 | else |
726a989a | 1416 | val = evaluate_stmt (stmt); |
750628d8 DN |
1417 | } |
1418 | else | |
726a989a RB |
1419 | /* Evaluate the statement, which could be |
1420 | either a GIMPLE_ASSIGN or a GIMPLE_CALL. */ | |
87e1e42b | 1421 | val = evaluate_stmt (stmt); |
6de9cd9a | 1422 | |
0bca51f0 | 1423 | retval = SSA_PROP_NOT_INTERESTING; |
6de9cd9a | 1424 | |
750628d8 | 1425 | /* Set the lattice value of the statement's output. */ |
0bca51f0 | 1426 | if (TREE_CODE (lhs) == SSA_NAME) |
6de9cd9a | 1427 | { |
0bca51f0 DN |
1428 | /* If STMT is an assignment to an SSA_NAME, we only have one |
1429 | value to set. */ | |
1430 | if (set_lattice_value (lhs, val)) | |
1431 | { | |
1432 | *output_p = lhs; | |
1433 | if (val.lattice_val == VARYING) | |
1434 | retval = SSA_PROP_VARYING; | |
1435 | else | |
1436 | retval = SSA_PROP_INTERESTING; | |
1437 | } | |
6de9cd9a | 1438 | } |
0bca51f0 DN |
1439 | |
1440 | return retval; | |
6de9cd9a DN |
1441 | } |
1442 | ||

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

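/* Illustrative sketch: if CCP has shown x_1 to be CONSTANT 0, then for
   if (x_1 != 0) evaluate_stmt yields constant false, find_taken_edge
   returns the false edge, and only that edge is fed to the propagation
   engine; the true arm is never marked executable.  */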
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
        return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
         computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}


/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",                                /* name */
  gate_ccp,                             /* gate */
  do_ssa_ccp,                           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CCP,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
 }
};


/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
                                bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
         to sign-extend the (possibly pointer) offset here
         and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
                                 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 0,
                                   soffset.low, soffset.high,
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
        min_idx = TYPE_MIN_VALUE (idx_type);
      else
        min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should not be folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
          /* Accesses after the end of arrays of size 0 (gcc
             extension) and 1 are likely intentional ("struct
             hack").  */
          && compare_tree_int (up_bound, 1) > 0)
        return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
          && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
          && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
        return NULL_TREE;
    }
  else if (!allow_negative_idx
           && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}

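/* Illustrative sketch (assumes 4-byte int): given int a[8], folding
   *(int *)((char *)&a + 12) divides the byte offset by the element
   size, 12 / 4 = 3, and yields a[3]; an offset of 13 leaves a nonzero
   remainder and the folding is refused.  */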

/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
                                    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;

      if (!DECL_FIELD_OFFSET (f))
        continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      if (cmp == 0
          && useless_type_conversion_p (orig_type, field_type))
        {
          if (base_is_ptr)
            base = build1 (INDIRECT_REF, record_type, base);
          t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
          return t;
        }

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
        continue;

      /* Check for array at the end of the struct.  This is often
         used for flexible array members.  We should be able to
         turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
        tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
          || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
        continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
        continue;

      /* If we matched, then set offset to the displacement into
         this field.  */
      if (base_is_ptr)
        new_base = build1 (INDIRECT_REF, record_type, base);
      else
        new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
                                            f == TYPE_FIELDS (record_type));
      if (ret)
        return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
                                                orig_type, false);
      if (ret)
        return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
                                      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
                                             orig_type, false);
}

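/* Illustrative sketch (assumes 4-byte int): for
     struct S { int a; int b; } *p;
   the access *(int *)((char *)p + 4) matches field b exactly (cmp == 0
   with a compatible type) and folds to p->b, while an offset of 2 lands
   in the middle of the scalar a and is left alone.  */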
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle the case where an existing COMPONENT_REF picks, e.g., the
         wrong field of a union, so it needs to be removed and a new
         COMPONENT_REF constructed.  Such a wrong COMPONENT_REF is often
         constructed by folding the (type *)&object within the expression
         (type *)&object+offset.  */
      if (handled_component_p (base))
        {
          HOST_WIDE_INT sub_offset, size, maxsize;
          tree newbase;
          newbase = get_ref_base_and_extent (base, &sub_offset,
                                             &size, &maxsize);
          gcc_assert (newbase);
          if (size == maxsize
              && size != -1
              && !(sub_offset & (BITS_PER_UNIT - 1)))
            {
              base = newbase;
              if (sub_offset)
                offset = int_const_binop (PLUS_EXPR, offset,
                                          build_int_cst (TREE_TYPE (offset),
                                                         sub_offset / BITS_PER_UNIT), 1);
            }
        }
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
          && integer_zerop (offset))
        return base;
      type = TREE_TYPE (base);
    }
  else
    {
      base_is_ptr = true;
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
        return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (type, base, offset,
                                            orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
        base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}

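/* Illustrative note: this is the common entry point for the two folders
   above; e.g. for *(int *)((char *)&s + 4) the ADDR_EXPR is stripped
   first, then the COMPONENT_REF folder is tried, falling back to the
   ARRAY_REF folder if it finds nothing.  */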
/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   Before attempting the conversion strip off existing component refs.  */

tree
maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
              && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
         make sure not to fold address arithmetic so that we change
         reference from one array to another.  This would happen for
         example for

           struct X { char s1[10]; char s2[10] } s;
           char *foo (void) { return &s.s2[-4]; }

         where we need to avoid generating &s.s1[6].  As the C and
         C++ frontends create different initial trees
         (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
         sophisticated comparisons here.  Note that checking for the
         condition after the fact is easier than trying to avoid doing
         the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
        orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
           || (TREE_CODE (orig) == COMPONENT_REF
               && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
          && (TREE_CODE (t) == ARRAY_REF
              || TREE_CODE (t) == COMPONENT_REF)
          && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
                               ? TREE_OPERAND (orig, 0) : orig,
                               TREE_CODE (t) == ARRAY_REF
                               ? TREE_OPERAND (t, 0) : t, 0))
        return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
        return NULL_TREE;
      return build_fold_addr_expr_with_type (t, ptr_type);
    }

  return NULL_TREE;
}

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
                             int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
          && is_gimple_min_invariant (DECL_INITIAL (base)))
        return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
                                          TREE_TYPE (expr));
      if (t)
        {
          /* Preserve volatileness of the original expression.
             We can end up with a plain decl here which is shared
             and we shouldn't mess with its flags.  */
          if (!SSA_VAR_P (t))
            TREE_THIS_VOLATILE (t) = volatile_p;
          return t;
        }
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we die in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_reference (base, offset,
                                              TREE_TYPE (expr));
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}

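/* Illustrative sketch (hypothetical names): after CCP propagates
   p_1 = &s into *p_1, this routine sees *(&s + 0) and lets
   maybe_fold_offset_to_reference rewrite it as s or s.f; an access to
   a string constant such as "abc"[1] instead folds directly to 'b'.  */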

/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
        &array[x]
   which may be able to propagate further.  */

tree
maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
                             array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
                                            ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, res_type, t);

  return t;
}

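/* Illustrative sketch (assumes 4-byte elements): &a[1] p+ 8 first
   becomes &a p+ 12, the existing index being folded into the byte
   offset, and maybe_fold_offset_to_array_ref then rebuilds the
   result as &a[3].  */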
/* For passing state through walk_tree into fold_stmt_r and its
   children.  */

struct fold_stmt_r_data
{
  gimple stmt;
  bool *changed_p;
  bool *inside_addr_expr_p;
};

/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct fold_stmt_r_data *fold_stmt_r_data;
  bool *inside_addr_expr_p;
  bool *changed_p;
  tree expr = *expr_p, t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
  inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
  changed_p = fold_stmt_r_data->changed_p;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
                                    integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
        t = NULL_TREE;
      if (!t
          && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        /* If we had a good reason for propagating the address here,
           make sure we end up with valid gimple.  See PR34989.  */
        t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
      break;

    case NOP_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      if (POINTER_TYPE_P (TREE_TYPE (expr))
          && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
          && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
                                                integer_zero_node,
                                                TREE_TYPE (TREE_TYPE (expr)))))
        return t;
      break;

      /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
         We'd only want to bother decomposing an existing ARRAY_REF if
         the base array is found to have another offset contained within.
         Otherwise we'd be wasting time.  */
    case ARRAY_REF:
      /* If we are not processing expressions found within an
         ADDR_EXPR, then we can fold constant array references.
         Don't fold on LHS either, to avoid folding "abc"[0] = 5
         into 'a' = 5.  */
      if (!*inside_addr_expr_p && !wi->is_lhs)
        t = fold_read_from_constant_string (expr);
      else
        t = NULL;
      break;

    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the value is properly considered constant, and so gets
         propagated as expected.  */
      if (*changed_p)
        recompute_tree_invariant_for_addr_expr (expr);
      return NULL_TREE;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
                                    TREE_OPERAND (expr, 0),
                                    TREE_OPERAND (expr, 1));
      break;

    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
        {
          tree op0 = TREE_OPERAND (expr, 0);
          tree tem;
          bool set;

          fold_defer_overflow_warnings ();
          tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
                             TREE_OPERAND (op0, 0),
                             TREE_OPERAND (op0, 1));
          /* This is actually a conditional expression, not a GIMPLE
             conditional statement, however, the valid_gimple_rhs_p
             test still applies.  */
          set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
          fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
          if (set)
            {
              COND_EXPR_COND (expr) = tem;
              t = expr;
              break;
            }
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      /* Preserve volatileness of the original expression.
         We can end up with a plain decl here which is shared
         and we shouldn't mess with its flags.  */
      if (!SSA_VAR_P (t))
        TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}

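/* Illustrative walk (hypothetical names): for tmp_1 = *(&a p+ 4) the
   POINTER_PLUS_EXPR is visited first and rewritten by
   maybe_fold_stmt_addition to &a[1]; the enclosing INDIRECT_REF then
   folds to a[1] and *changed_p is set.  */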
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}

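/* Illustrative sketch (hypothetical SSA names and strings): for
     p_3 = PHI <"foo"(bb1), "bar"(bb2)>
   both arguments have length 3, so with TYPE == 0 the walk succeeds
   and *LENGTH becomes 3; mixing "foo" with "quux" fails because the
   two lengths disagree.  */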

/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}

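/* Illustrative sketch (hypothetical names): once get_maxval_strlen
   proves that the source of strcpy (dst, src) has length 3,
   fold_builtin_strcpy can rewrite the call as memcpy (dst, src, 4),
   copying the terminating NUL as well.  */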
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree temp = fold (COND_EXPR_COND (rhs));
            if (temp != COND_EXPR_COND (rhs))
              result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
                                    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
          }

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;
        else
          /* It is possible that fold_stmt_r simplified the RHS.
             Make sure that the subcode of this statement still
             reflects the principal operator of the rhs operand.  */
          return rhs;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
          {
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);
            if (t)
              return t;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        {
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
            {
              type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
              if (!useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
                type = TREE_TYPE (gimple_assign_rhs1 (stmt));
            }
          result = maybe_fold_stmt_addition (type,
                                             gimple_assign_rhs1 (stmt),
                                             gimple_assign_rhs2 (stmt));
        }

      if (!result)
        result = fold_binary (subcode,
                              TREE_TYPE (gimple_assign_lhs (stmt)),
                              gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

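/* Illustrative sketch (hypothetical SSA names): x_2 = 3 + 4 carries a
   GIMPLE_BINARY_RHS; fold_binary collapses it to 7, which passes
   valid_gimple_rhs_p and is returned as the replacement rhs.  */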
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
                             boolean_type_node,
                             gimple_cond_lhs (stmt),
                             gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}


/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
        return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
         here are when we've propagated the address of a decl into the
         object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && lang_hooks.fold_obj_type_ref
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
          && DECL_P (TREE_OPERAND
                     (OBJ_TYPE_REF_OBJECT (callee), 0)))
        {
          tree t;

          /* ??? Caution: Broken ADDR_EXPR semantics means that
             looking at the type of the operand of the addr_expr
             can yield an array type.  See silly exception in
             check_pointer_types_r.  */
          t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
          t = lang_hooks.fold_obj_type_ref (callee, t);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;

  bool changed = false;
  bool inside_addr_expr = false;

  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree new_rhs = fold_gimple_assign (gsi);
        if (new_rhs != NULL_TREE)
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        stmt = gsi_stmt (*gsi);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;
    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;

    default:
      return changed;
    }

  return changed;
}

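/* A small worked example (SSA names hypothetical): once constant
   propagation has substituted &b for p_1, the statement

       x_2 = *&b;

   is simplified by the operand walk above to

       x_2 = b;

   after which the GIMPLE_ASSIGN case still gives fold_gimple_assign a
   chance to simplify the right-hand side further.  */
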
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */

bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;

  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to __builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops;
        tree new_rhs;
        old_num_ops = gimple_num_ops (stmt);
        si = gsi_for_stmt (stmt);
        new_rhs = fold_gimple_assign (&si);
        if (new_rhs != NULL_TREE
            && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
          {
            gimple_assign_set_rhs_from_tree (&si, new_rhs);
            changed = true;
          }
        gcc_assert (gsi_stmt (si) == stmt);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    default:
      break;
    }

  return changed;
}

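/* Contrast with fold_stmt: since the statement must keep its slot and
   cannot be replaced, a folded right-hand side is installed only when
   it needs no more operands than the original RHS had (the
   get_gimple_rhs_num_ops check above).  For instance (hypothetical),
   folding x_1 = y_2 + 0 to x_1 = y_2 shrinks the RHS and is fine,
   whereas any folding that would have to grow or replace the
   statement is rejected here.  */
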
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
          || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}

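/* Source-level intuition (schematic, names hypothetical): a
   variable-length array in an inner scope expands into a save/restore
   pair, roughly

       t_1 = __builtin_stack_save ();
       ... VLA allocation and uses, no calls or asm in between ...
       __builtin_stack_restore (t_1);

   When another restore follows in the same block with nothing in
   between that could grow the stack, or the block falls straight
   through to the function exit, this restore is redundant: the
   matching stack_save is rewritten into a dead assignment and the
   restore call itself is deleted (integer_zero_node above signals
   "no effect").  */
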
/* If the va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   optimize __builtin_va_end (&ap) out as a NOP, and optimize
   __builtin_va_copy into a simple pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
                             1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}

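/* The intended rewrites, for a target whose va_list is a plain
   character pointer (schematic):

       __builtin_va_start (&ap, 0);   -->   ap = __builtin_next_arg (0);
       __builtin_va_copy (&d, s);     -->   d = s;
       __builtin_va_end (&ap);        -->   deleted

   Targets with a structured va_list, or with their own va_start
   expander, fail the checks above and keep the builtins for the
   stdarg pass to handle.  */
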
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    new_stmt = gimple_build_nop ();
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      copy_virtual_operands (new_stmt, stmt);
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}

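/* A hypothetical walk-through: suppose a call folds to the expression
   a_1 + b_2 * 4, which is not a flat GIMPLE right-hand side.  It is
   gimplified into a short sequence such as

       tmp_1 = b_2 * 4;
       tmp_2 = a_1 + tmp_1;

   the new statements are inserted before the call, and the call is
   then replaced by lhs = tmp_2, or by a GIMPLE_NOP when its result is
   unused.  */
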
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = ccp_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          push_stmt_changes (gsi_stmt_ptr (&i));

          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_rebuild_alias;
            }

          stmt = gsi_stmt (i);
          pop_stmt_changes (gsi_stmt_ptr (&i));

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin; there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

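/* Schematic end-to-end effect of the pass (hypothetical source): a
   guard such as

       if (__builtin_constant_p (x_1))

   that is still unresolved this late is folded to if (0), and any EH
   edges made dead by such replacements trigger the CFG cleanup
   requested through todoflags.  */
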

struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa                   /* todo_flags_finish */
 }
};