/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four-level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

     UNINITIALIZED  ->  the initial state of the value.  This value
                        is replaced with a correct initial value
                        the first time the value is used, so the
                        rest of the pass does not need to care about
                        it.  Using this value simplifies initialization
                        of the pass, and prevents us from needlessly
                        scanning statements that are never reached.

     UNDEFINED      ->  V_i is a local variable whose definition
                        has not been processed yet.  Therefore we
                        don't yet know if its value is a constant
                        or not.

     CONSTANT       ->  V_i has been found to hold a constant
                        value C.

     VARYING        ->  V_i cannot take a constant value, or if it
                        does, it is not possible to determine it
                        at compile time.

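   These four values form a lattice ordered UNINITIALIZED < UNDEFINED
   < CONSTANT < VARYING, and during simulation a name's value only
   moves upward in that order (see the assertion in set_lattice_value),
   which is what guarantees that the propagation terminates.
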
   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

             if (PRED)
               a_9 = 3;
             else
               a_10 = 100;
             a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

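   As a small worked example of the above, consider:

        x_1 = 4;
        y_2 = x_1 + 5;
        if (y_2 > 10)
          ...

   Visiting 'x_1 = 4' sets x_1 to CONSTANT 4; simulating the uses of
   x_1 then folds 'y_2 = x_1 + 5' to CONSTANT 9, and the predicate
   'y_2 > 10' can finally be evaluated to false, so only the else edge
   out of the conditional is marked executable.
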

   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

        struct A a;
        const int B = 42;

        void foo (int i)
        {
          if (i > 10)
            a.a = 42;
          else
            {
              a.b = 21;
              a.a = a.b + 21;
            }

          if (a.a != B)
            never_executed ();
        }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

        # a_5 = VDEF <a_4>
        a.a = 2;

        # VUSE <a_5>
        x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

        int A;

        foo (int i)
        {
          if (i_3 > 10)
            A_4 = 3;
          # A_5 = PHI (A_4, A_2);

          # VUSE <A_5>
          A.0_6 = A;

          return A.0_6;
        }

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;
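
/* The order of declaration above matters: set_lattice_value asserts
   that a name's lattice value only moves monotonically upward in this
   order (UNINITIALIZED < UNDEFINED < CONSTANT < VARYING).  */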

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* True if we are also propagating constants in stores and loads.  */
static bool do_store_ccp;

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

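/* This function is deliberately non-static and unused within this file;
   the intent (following the usual GCC debug_* convention) is that it
   can be called by hand from the debugger.  */
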
void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}



/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym)
      && !MTAG_P (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          STRIP_USELESS_TYPE_CONVERSION (val);
          if (is_gimple_min_invariant (val))
            return val;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && targetm.binds_local_p (sym)
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
  tree cst_val;

  if (!do_store_ccp && !is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
         non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
         initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
      val.mem_ref = sym;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (var);

      if (gimple_nop_p (stmt))
        {
          /* Variables defined by an empty statement are those used
             before being initialized.  If VAR is a local variable, we
             can assume initially that it is UNDEFINED, otherwise we must
             consider it VARYING.  */
          if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
            val.lattice_val = UNDEFINED;
          else
            val.lattice_val = VARYING;
        }
      else if (is_gimple_assign (stmt)
               /* Value-returning GIMPLE_CALL statements assign to
                  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
               || (is_gimple_call (stmt)
                   && gimple_call_lhs (stmt) != NULL_TREE)
               || gimple_code (stmt) == GIMPLE_PHI)
        {
          /* Any other variable defined by an assignment or a PHI node
             is considered UNDEFINED.  */
          val.lattice_val = UNDEFINED;
        }
      else
        {
          /* Otherwise, VAR will never take on a constant value.  */
          val.lattice_val = VARYING;
        }
    }

  return val;
}


/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mem_ref = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      val->mem_ref = NULL;
      return;
    }
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
              || (old_val->lattice_val == new_val.lattice_val
                  && ((!old_val->value && !new_val.value)
                      || operand_equal_p (old_val->value, new_val.value, 0))
                  && old_val->mem_ref == new_val.mem_ref));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}


/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!do_store_ccp
      && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* A GIMPLE_CALL is assumed to be varying.  NOTE: This may be overly
     conservative, in the presence of const and pure calls.  */
  if (code == GIMPLE_CALL)
    return VARYING;

  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
     is_gimple_min_invariant, so we do not consider calls or
     other forms of assignment.  */
  if (code == GIMPLE_ASSIGN
      && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
          == GIMPLE_SINGLE_RHS)
      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
    return CONSTANT;

  if (code == GIMPLE_COND
      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
    return CONSTANT;

  if (code == GIMPLE_SWITCH
      && is_gimple_min_invariant (gimple_switch_index (stmt)))
    return CONSTANT;

  /* Arrive here for more complex cases.  */

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
         memory may have only VARYING virtual operands, but still be
         constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}

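/* Note the division of labor between the two predicates here:
   likely_value above is the optimistic estimate consulted each time a
   statement is simulated, while surely_varying_stmt_p below is the
   conservative filter used once by ccp_initialize to keep statements
   that can never produce a constant out of the simulation entirely.  */
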
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    {
      if (!do_store_ccp)
        return true;

      /* We can only handle simple loads and stores.  */
      if (!stmt_makes_single_load (stmt)
          && !stmt_makes_single_store (stmt))
        return true;
    }

  /* If it contains a call, it is varying.  */
  if (is_gimple_call (stmt))
    return true;

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                {
                  if (is_varying)
                    set_value_varying (def);
                }
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!do_store_ccp && !is_gimple_reg (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}


/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

     any  M UNDEFINED   = any
     any  M VARYING     = VARYING
     Ci   M Cj          = Ci       if (i == j)
     Ci   M Cj          = VARYING  if (i != j)
   */

728 | |
729 | static void | |
0bca51f0 | 730 | ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2) |
6de9cd9a | 731 | { |
0bca51f0 | 732 | if (val1->lattice_val == UNDEFINED) |
6de9cd9a | 733 | { |
0bca51f0 DN |
734 | /* UNDEFINED M any = any */ |
735 | *val1 = *val2; | |
750628d8 | 736 | } |
0bca51f0 | 737 | else if (val2->lattice_val == UNDEFINED) |
195da47b | 738 | { |
0bca51f0 DN |
739 | /* any M UNDEFINED = any |
740 | Nothing to do. VAL1 already contains the value we want. */ | |
741 | ; | |
195da47b | 742 | } |
0bca51f0 DN |
743 | else if (val1->lattice_val == VARYING |
744 | || val2->lattice_val == VARYING) | |
750628d8 | 745 | { |
0bca51f0 DN |
746 | /* any M VARYING = VARYING. */ |
747 | val1->lattice_val = VARYING; | |
748 | val1->value = NULL_TREE; | |
749 | val1->mem_ref = NULL_TREE; | |
750628d8 | 750 | } |
0bca51f0 DN |
751 | else if (val1->lattice_val == CONSTANT |
752 | && val2->lattice_val == CONSTANT | |
753 | && simple_cst_equal (val1->value, val2->value) == 1 | |
754 | && (!do_store_ccp | |
a318e3ac SB |
755 | || (val1->mem_ref && val2->mem_ref |
756 | && operand_equal_p (val1->mem_ref, val2->mem_ref, 0)))) | |
750628d8 | 757 | { |
0bca51f0 DN |
758 | /* Ci M Cj = Ci if (i == j) |
759 | Ci M Cj = VARYING if (i != j) | |
760 | ||
761 | If these two values come from memory stores, make sure that | |
762 | they come from the same memory reference. */ | |
763 | val1->lattice_val = CONSTANT; | |
764 | val1->value = val1->value; | |
765 | val1->mem_ref = val1->mem_ref; | |
750628d8 DN |
766 | } |
767 | else | |
768 | { | |
0bca51f0 DN |
769 | /* Any other combination is VARYING. */ |
770 | val1->lattice_val = VARYING; | |
771 | val1->value = NULL_TREE; | |
772 | val1->mem_ref = NULL_TREE; | |
750628d8 | 773 | } |
6de9cd9a DN |
774 | } |
775 | ||
776 | ||
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      new_val.mem_ref = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val;

          if (is_gimple_min_invariant (arg))
            {
              arg_val.lattice_val = CONSTANT;
              arg_val.value = arg;
              arg_val.mem_ref = NULL_TREE;
            }
          else
            arg_val = *(get_value (arg));

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}


/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return get_value (rhs)->value;
                }
              /* Handle propagating invariant addresses into address operations.
                 The folding we do here matches that in tree-ssa-forwprop.c.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  tree *base;
                  base = &TREE_OPERAND (rhs, 0);
                  while (handled_component_p (*base))
                    base = &TREE_OPERAND (*base, 0);
                  if (TREE_CODE (*base) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
                      if (val->lattice_val == CONSTANT
                          && TREE_CODE (val->value) == ADDR_EXPR
                          && useless_type_conversion_p
                               (TREE_TYPE (TREE_OPERAND (*base, 0)),
                                TREE_TYPE (val->value))
                          && useless_type_conversion_p
                               (TREE_TYPE (*base),
                                TREE_TYPE (TREE_OPERAND (val->value, 0))))
                        {
                          /* We need to return a new tree, not modify the IL
                             or share parts of it.  So play some tricks to
                             avoid manually building it.  */
                          tree ret, save = *base;
                          *base = TREE_OPERAND (val->value, 0);
                          ret = unshare_expr (rhs);
                          recompute_tree_invariant_for_addr_expr (ret);
                          *base = save;
                          return ret;
                        }
                    }
                }

              else if (do_store_ccp && stmt_makes_single_load (stmt))
                {
                  /* If the RHS is a memory load, see if the VUSEs associated with
                     it are a valid constant for that memory load.  */
                  prop_value_t *val = get_value_loaded_by (stmt, const_val);
                  if (val && val->mem_ref)
                    {
                      if (operand_equal_p (val->mem_ref, rhs, 0))
                        return val->value;

                      /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a
                         complex type with a known constant value, return it.  */
                      if ((TREE_CODE (rhs) == REALPART_EXPR
                           || TREE_CODE (rhs) == IMAGPART_EXPR)
                          && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
                        return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs),
                                            val->value);
                    }
                }

              if (kind == tcc_reference)
                return fold_const_aggregate_ref (rhs);
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = gimple_assign_rhs1 (stmt);

              /* Simplify the operand down to a constant.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = get_value (op0)->value;
                }

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for pointer type conversions
                 do not apply here.  Substitution later will only substitute to
                 allowed places.  */
              if ((subcode == NOP_EXPR || subcode == CONVERT_EXPR)
                  && ((POINTER_TYPE_P (TREE_TYPE (lhs))
                       && POINTER_TYPE_P (TREE_TYPE (op0)))
                      || useless_type_conversion_p (TREE_TYPE (lhs),
                                                    TREE_TYPE (op0))))
                return op0;

              return fold_unary (subcode, gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = gimple_assign_rhs1 (stmt);
              tree op1 = gimple_assign_rhs2 (stmt);

              /* Simplify the operands down to constants when appropriate.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = val->value;
                }

              if (TREE_CODE (op1) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op1);
                  if (val->lattice_val == CONSTANT)
                    op1 = val->value;
                }

              return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
            }

          default:
            gcc_unreachable ();
          }
      }
      break;

    case GIMPLE_CALL:
      /* It may be possible to fold away calls to builtin functions if
         their arguments are constants.  At present, such folding will not
         be attempted, as likely_value classifies all calls as VARYING.  */
      gcc_unreachable ();
      break;

    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = gimple_cond_lhs (stmt);
        tree op1 = gimple_cond_rhs (stmt);
        enum tree_code code = gimple_cond_code (stmt);

        /* Simplify the operands down to constants when appropriate.  */
        if (TREE_CODE (op0) == SSA_NAME)
          {
            prop_value_t *val = get_value (op0);
            if (val->lattice_val == CONSTANT)
              op0 = val->value;
          }

        if (TREE_CODE (op1) == SSA_NAME)
          {
            prop_value_t *val = get_value (op1);
            if (val->lattice_val == CONSTANT)
              op1 = val->value;
          }

        return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        tree rhs = gimple_switch_index (stmt);

        if (TREE_CODE (rhs) == SSA_NAME)
          {
            /* If the RHS is an SSA_NAME, return its known constant value,
               if any.  */
            return get_value (rhs)->value;
          }

        return rhs;
      }

    default:
      gcc_unreachable ();
    }
}


/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        case STRING_CST:
        case CONSTRUCTOR:
          ctor = base;
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || (TREE_CODE (ctor) != CONSTRUCTOR
              && TREE_CODE (ctor) != STRING_CST)
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
         with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
        {
        case SSA_NAME:
          if ((value = get_value (idx))
              && value->lattice_val == CONSTANT
              && TREE_CODE (value->value) == INTEGER_CST)
            idx = value->value;
          else
            return NULL_TREE;
          break;

        case INTEGER_CST:
          break;

        default:
          return NULL_TREE;
        }

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
        {
          if ((TYPE_MODE (TREE_TYPE (t))
               == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
              && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
                  == MODE_INT)
              && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
              && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
            return build_int_cst_type (TREE_TYPE (t),
                                       (TREE_STRING_POINTER (ctor)
                                        [TREE_INT_CST_LOW (idx)]));
          return NULL_TREE;
        }

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
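      /* That is, scan the constructor's elements for one whose index
         matches IDX and return its value, stripping any useless type
         conversion on the way out.  */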
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (tree_int_cst_equal (cfield, idx))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            return cval;
          }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || TREE_CODE (ctor) != CONSTRUCTOR
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (cfield == field
            /* FIXME: Handle bit-fields.  */
            && ! DECL_BIT_FIELD (cfield))
          {
            STRIP_USELESS_TYPE_CONVERSION (cval);
            return cval;
          }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    case INDIRECT_REF:
      {
        tree base = TREE_OPERAND (t, 0);
        if (TREE_CODE (base) == SSA_NAME
            && (value = get_value (base))
            && value->lattice_val == CONSTANT
            && TREE_CODE (value->value) == ADDR_EXPR)
          return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  val.mem_ref = NULL_TREE;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);

          /* Other cases cannot satisfy is_gimple_min_invariant
             without folding.  */
          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
            simplified = gimple_assign_rhs1 (stmt);
        }
      else if (code == GIMPLE_SWITCH)
        simplified = gimple_switch_index (stmt);
      else
        /* These cannot satisfy is_gimple_min_invariant without folding.  */
        gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
        {
        case CONSTANT:
          fprintf (dump_file, "CONSTANT");
          break;
        case UNDEFINED:
          fprintf (dump_file, "UNDEFINED");
          break;
        case VARYING:
          fprintf (dump_file, "VARYING");
          break;
        default:;
        }
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had UNDEFINED operands, then the result of the statement
         should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
        val.lattice_val = likelyvalue;
      else
        val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
        {
          /* For a simple copy operation, we copy the lattice values.  */
          prop_value_t *nval = get_value (rhs);
          val = *nval;
        }
      else if (do_store_ccp && stmt_makes_single_load (stmt))
        {
          /* Same as above, but the RHS is not a gimple register and yet
             has a known VUSE.  If STMT is loading from the same memory
             location that created the SSA_NAMEs for the virtual operands,
             we can propagate the value on the RHS.  */
          prop_value_t *nval = get_value_loaded_by (stmt, const_val);

          if (nval
              && nval->mem_ref
              && operand_equal_p (nval->mem_ref, rhs, 0))
            val = *nval;
          else
            val = evaluate_stmt (stmt);
        }
      else
        val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
         value to set.  */
      if (set_lattice_value (lhs, val))
        {
          *output_p = lhs;
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }
  else if (do_store_ccp && stmt_makes_single_store (stmt))
    {
      /* Otherwise, set the names in VDEF operands to the new
         constant value and mark the LHS as the memory reference
         associated with VAL.  */
      ssa_op_iter i;
      tree vdef;
      bool changed;

      /* Mark VAL as stored in the LHS of this assignment.  */
      if (val.lattice_val == CONSTANT)
        val.mem_ref = lhs;

      /* Set the value of every VDEF to VAL.  */
      changed = false;
      FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
        {
          /* See PR 29801.  We may have VDEFs for read-only variables
             (see the handling of unmodifiable variables in
             add_virtual_operand); do not attempt to change their value.  */
          if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE)
            continue;

          changed |= set_lattice_value (vdef, val);
        }

      /* Note that for propagation purposes, we are only interested in
         visiting statements that load the exact same memory reference
         stored here.  Those statements will have the exact same list
         of virtual uses, so it is enough to set the output of this
         statement to be its first virtual definition.  */
      *output_p = first_vdef (stmt);
      if (changed)
        {
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }

  return retval;
}


750628d8 DN |
1448 | /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING |
1449 | if it can determine which edge will be taken. Otherwise, return | |
1450 | SSA_PROP_VARYING. */ | |
1451 | ||
1452 | static enum ssa_prop_result | |
726a989a | 1453 | visit_cond_stmt (gimple stmt, edge *taken_edge_p) |
6de9cd9a | 1454 | { |
0bca51f0 | 1455 | prop_value_t val; |
750628d8 DN |
1456 | basic_block block; |
1457 | ||
726a989a | 1458 | block = gimple_bb (stmt); |
750628d8 DN |
1459 | val = evaluate_stmt (stmt); |
1460 | ||
1461 | /* Find which edge out of the conditional block will be taken and add it | |
1462 | to the worklist. If no single edge can be determined statically, | |
1463 | return SSA_PROP_VARYING to feed all the outgoing edges to the | |
1464 | propagation engine. */ | |
0bca51f0 | 1465 | *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0; |
750628d8 DN |
1466 | if (*taken_edge_p) |
1467 | return SSA_PROP_INTERESTING; | |
1468 | else | |
1469 | return SSA_PROP_VARYING; | |
6de9cd9a DN |
1470 | } |
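/* For illustration (hypothetical GIMPLE): if CCP already knows x_1 is
   the constant 5, then for "if (x_1 > 0)" evaluate_stmt produces a
   true predicate, find_taken_edge returns the true edge, and only that
   edge is simulated; otherwise all outgoing edges are fed to the
   propagation engine via SSA_PROP_VARYING.  */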
1471 | ||
6de9cd9a | 1472 | |
750628d8 DN |
1473 | /* Evaluate statement STMT. If the statement produces an output value and |
1474 | its evaluation changes the lattice value of its output, return | |
1475 | SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the | |
1476 | output value. | |
1477 | ||
1478 | If STMT is a conditional branch and we can determine its truth | |
1479 | value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying | |
1480 | value, return SSA_PROP_VARYING. */ | |
6de9cd9a | 1481 | |
750628d8 | 1482 | static enum ssa_prop_result |
726a989a | 1483 | ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p) |
750628d8 | 1484 | { |
750628d8 DN |
1485 | tree def; |
1486 | ssa_op_iter iter; | |
6de9cd9a | 1487 | |
750628d8 | 1488 | if (dump_file && (dump_flags & TDF_DETAILS)) |
6de9cd9a | 1489 | { |
0bca51f0 | 1490 | fprintf (dump_file, "\nVisiting statement:\n"); |
726a989a | 1491 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
6de9cd9a | 1492 | } |
6de9cd9a | 1493 | |
726a989a | 1494 | switch (gimple_code (stmt)) |
6de9cd9a | 1495 | { |
726a989a RB |
1496 | case GIMPLE_ASSIGN: |
1497 | /* If the statement is an assignment that produces a single | |
1498 | output value, evaluate its RHS to see if the lattice value of | |
1499 | its output has changed. */ | |
1500 | return visit_assignment (stmt, output_p); | |
1501 | ||
1502 | case GIMPLE_CALL: | |
1503 | /* A value-returning call also performs an assignment. */ | |
1504 | if (gimple_call_lhs (stmt) != NULL_TREE) | |
1505 | return visit_assignment (stmt, output_p); | |
1506 | break; | |
1507 | ||
1508 | case GIMPLE_COND: | |
1509 | case GIMPLE_SWITCH: | |
1510 | /* If STMT is a conditional branch, see if we can determine | |
1511 | which branch will be taken. */ | |
1512 | /* FIXME. It appears that we should be able to optimize | |
1513 | computed GOTOs here as well. */ | |
1514 | return visit_cond_stmt (stmt, taken_edge_p); | |
1515 | ||
1516 | default: | |
1517 | break; | |
6de9cd9a | 1518 | } |
6de9cd9a | 1519 | |
750628d8 DN |
1520 | /* Any other kind of statement is not interesting for constant |
1521 | propagation and, therefore, not worth simulating. */ | |
750628d8 DN |
1522 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1523 | fprintf (dump_file, "No interesting values produced. Marked VARYING.\n"); | |
6de9cd9a | 1524 | |
750628d8 DN |
1525 | /* Definitions made by statements other than assignments to |
1526 | SSA_NAMEs represent unknown modifications to their outputs. | |
1527 | Mark them VARYING. */ | |
0bca51f0 DN |
1528 | FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS) |
1529 | { | |
1530 | prop_value_t v = { VARYING, NULL_TREE, NULL_TREE }; | |
1531 | set_lattice_value (def, v); | |
1532 | } | |
6de9cd9a | 1533 | |
750628d8 DN |
1534 | return SSA_PROP_VARYING; |
1535 | } | |
6de9cd9a | 1536 | |
6de9cd9a | 1537 | |
0bca51f0 | 1538 | /* Main entry point for SSA Conditional Constant Propagation. */ |
750628d8 | 1539 | |
3253eafb | 1540 | static unsigned int |
0bca51f0 | 1541 | execute_ssa_ccp (bool store_ccp) |
750628d8 | 1542 | { |
0bca51f0 | 1543 | do_store_ccp = store_ccp; |
750628d8 DN |
1544 | ccp_initialize (); |
1545 | ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node); | |
3253eafb | 1546 | if (ccp_finalize ()) |
706ca88e | 1547 | return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals); |
3253eafb JH |
1548 | else |
1549 | return 0; | |
6de9cd9a DN |
1550 | } |
1551 | ||
173b818d | 1552 | |
c2924966 | 1553 | static unsigned int |
0bca51f0 DN |
1554 | do_ssa_ccp (void) |
1555 | { | |
3253eafb | 1556 | return execute_ssa_ccp (false); |
0bca51f0 DN |
1557 | } |
1558 | ||
1559 | ||
173b818d | 1560 | static bool |
750628d8 | 1561 | gate_ccp (void) |
173b818d | 1562 | { |
750628d8 | 1563 | return flag_tree_ccp != 0; |
173b818d BB |
1564 | } |
1565 | ||
6de9cd9a | 1566 | |
8ddbbcae | 1567 | struct gimple_opt_pass pass_ccp = |
750628d8 | 1568 | { |
8ddbbcae JH |
1569 | { |
1570 | GIMPLE_PASS, | |
750628d8 DN |
1571 | "ccp", /* name */ |
1572 | gate_ccp, /* gate */ | |
0bca51f0 | 1573 | do_ssa_ccp, /* execute */ |
750628d8 DN |
1574 | NULL, /* sub */ |
1575 | NULL, /* next */ | |
1576 | 0, /* static_pass_number */ | |
1577 | TV_TREE_CCP, /* tv_id */ | |
7faade0f | 1578 | PROP_cfg | PROP_ssa, /* properties_required */ |
750628d8 | 1579 | 0, /* properties_provided */ |
ae07b463 | 1580 | 0, /* properties_destroyed */ |
750628d8 | 1581 | 0, /* todo_flags_start */ |
3253eafb | 1582 | TODO_dump_func | TODO_verify_ssa |
8ddbbcae JH |
1583 | | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
1584 | } | |
750628d8 | 1585 | }; |
6de9cd9a | 1586 | |
6de9cd9a | 1587 | |
c2924966 | 1588 | static unsigned int |
0bca51f0 DN |
1589 | do_ssa_store_ccp (void) |
1590 | { | |
1591 | /* If STORE-CCP is not enabled, we just run regular CCP. */ | |
3253eafb | 1592 | return execute_ssa_ccp (flag_tree_store_ccp != 0); |
0bca51f0 DN |
1593 | } |
1594 | ||
1595 | static bool | |
1596 | gate_store_ccp (void) | |
1597 | { | |
1598 | /* STORE-CCP is enabled only with -ftree-store-ccp, but when | |
1599 | -fno-tree-store-ccp is specified, we should run regular CCP. | |
1600 | That's why the pass is enabled with either flag. */ | |
1601 | return flag_tree_store_ccp != 0 || flag_tree_ccp != 0; | |
1602 | } | |
1603 | ||
1604 | ||
8ddbbcae | 1605 | struct gimple_opt_pass pass_store_ccp = |
0bca51f0 | 1606 | { |
8ddbbcae JH |
1607 | { |
1608 | GIMPLE_PASS, | |
0bca51f0 DN |
1609 | "store_ccp", /* name */ |
1610 | gate_store_ccp, /* gate */ | |
1611 | do_ssa_store_ccp, /* execute */ | |
1612 | NULL, /* sub */ | |
1613 | NULL, /* next */ | |
1614 | 0, /* static_pass_number */ | |
1615 | TV_TREE_STORE_CCP, /* tv_id */ | |
1616 | PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */ | |
1617 | 0, /* properties_provided */ | |
ae07b463 | 1618 | 0, /* properties_destroyed */ |
0bca51f0 | 1619 | 0, /* todo_flags_start */ |
3253eafb | 1620 | TODO_dump_func | TODO_verify_ssa |
8ddbbcae JH |
1621 | | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
1622 | } | |
0bca51f0 DN |
1623 | }; |
1624 | ||
6de9cd9a DN |
1625 | /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X]. |
1626 | BASE is an expression of array type. OFFSET is a byte displacement. ORIG_TYPE
9cf737f8 | 1627 | is the desired result type. */ |
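/* A worked instance on a hypothetical input:

     int a[8];
     ... = *(int *)((char *) &a[2] + 12);

   with 4-byte int, OFFSET is 12, the division by the element size
   yields index 3, and adding the ARRAY_REF's own element offset 2
   produces the equivalent ARRAY_REF a[5], which later passes can
   analyze far more easily than the raw pointer form.  */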
6de9cd9a DN |
1628 | |
1629 | static tree | |
1fd89060 RG |
1630 | maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type, |
1631 | bool allow_negative_idx) | |
6de9cd9a | 1632 | { |
891fc5e9 | 1633 | tree min_idx, idx, idx_type, elt_offset = integer_zero_node; |
44de5aeb | 1634 | tree array_type, elt_type, elt_size; |
3097760b | 1635 | tree domain_type; |
44de5aeb RK |
1636 | |
1637 | /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1638 | measured in units of the size of the element type) from that ARRAY_REF.
1639 | We can't do anything if either offset is variable.
1640 | ||
1641 | The case we handle here is *(&A[N]+O). */ | |
1642 | if (TREE_CODE (base) == ARRAY_REF) | |
1643 | { | |
1644 | tree low_bound = array_ref_low_bound (base); | |
1645 | ||
1646 | elt_offset = TREE_OPERAND (base, 1); | |
1647 | if (TREE_CODE (low_bound) != INTEGER_CST | |
1648 | || TREE_CODE (elt_offset) != INTEGER_CST) | |
1649 | return NULL_TREE; | |
1650 | ||
1651 | elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0); | |
1652 | base = TREE_OPERAND (base, 0); | |
1653 | } | |
6de9cd9a DN |
1654 | |
1655 | /* Ignore stupid user tricks of indexing non-array variables. */ | |
1656 | array_type = TREE_TYPE (base); | |
1657 | if (TREE_CODE (array_type) != ARRAY_TYPE) | |
1658 | return NULL_TREE; | |
1659 | elt_type = TREE_TYPE (array_type); | |
f4088621 | 1660 | if (!useless_type_conversion_p (orig_type, elt_type)) |
6de9cd9a | 1661 | return NULL_TREE; |
891fc5e9 RG |
1662 | |
1663 | /* Use signed size type for intermediate computation on the index. */ | |
1664 | idx_type = signed_type_for (size_type_node); | |
1665 | ||
44de5aeb RK |
1666 | /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the |
1667 | element type (so we can use the alignment if it's not constant). | |
1668 | Otherwise, compute the offset as an index by using a division. If the | |
1669 | division isn't exact, then don't do anything. */ | |
6de9cd9a | 1670 | elt_size = TYPE_SIZE_UNIT (elt_type); |
fe9821b8 JH |
1671 | if (!elt_size) |
1672 | return NULL; | |
44de5aeb RK |
1673 | if (integer_zerop (offset)) |
1674 | { | |
1675 | if (TREE_CODE (elt_size) != INTEGER_CST) | |
1676 | elt_size = size_int (TYPE_ALIGN (elt_type)); | |
6de9cd9a | 1677 | |
891fc5e9 | 1678 | idx = build_int_cst (idx_type, 0); |
44de5aeb RK |
1679 | } |
1680 | else | |
1681 | { | |
1682 | unsigned HOST_WIDE_INT lquo, lrem; | |
1683 | HOST_WIDE_INT hquo, hrem; | |
891fc5e9 | 1684 | double_int soffset; |
44de5aeb | 1685 | |
891fc5e9 RG |
1686 | /* The final array offset should be signed, so we need |
1687 | to sign-extend the (possibly pointer) offset here | |
1688 | and use signed division. */ | |
1689 | soffset = double_int_sext (tree_to_double_int (offset), | |
1690 | TYPE_PRECISION (TREE_TYPE (offset))); | |
44de5aeb | 1691 | if (TREE_CODE (elt_size) != INTEGER_CST |
891fc5e9 RG |
1692 | || div_and_round_double (TRUNC_DIV_EXPR, 0, |
1693 | soffset.low, soffset.high, | |
44de5aeb RK |
1694 | TREE_INT_CST_LOW (elt_size), |
1695 | TREE_INT_CST_HIGH (elt_size), | |
1696 | &lquo, &hquo, &lrem, &hrem) | |
1697 | || lrem || hrem) | |
1698 | return NULL_TREE; | |
6de9cd9a | 1699 | |
891fc5e9 | 1700 | idx = build_int_cst_wide (idx_type, lquo, hquo); |
44de5aeb RK |
1701 | } |
1702 | ||
1703 | /* Assume the low bound is zero. If there is a domain type, get the | |
1704 | low bound, if any, convert the index into that type, and add the | |
1705 | low bound. */ | |
891fc5e9 | 1706 | min_idx = build_int_cst (idx_type, 0); |
3097760b JH |
1707 | domain_type = TYPE_DOMAIN (array_type); |
1708 | if (domain_type) | |
6de9cd9a | 1709 | { |
3097760b | 1710 | idx_type = domain_type; |
891fc5e9 RG |
1711 | if (TYPE_MIN_VALUE (idx_type)) |
1712 | min_idx = TYPE_MIN_VALUE (idx_type); | |
44de5aeb | 1713 | else |
891fc5e9 | 1714 | min_idx = fold_convert (idx_type, min_idx); |
44de5aeb RK |
1715 | |
1716 | if (TREE_CODE (min_idx) != INTEGER_CST) | |
1717 | return NULL_TREE; | |
1718 | ||
891fc5e9 | 1719 | elt_offset = fold_convert (idx_type, elt_offset); |
6de9cd9a DN |
1720 | } |
1721 | ||
44de5aeb RK |
1722 | if (!integer_zerop (min_idx)) |
1723 | idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0); | |
1724 | if (!integer_zerop (elt_offset)) | |
1725 | idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0); | |
1726 | ||
891fc5e9 RG |
1727 | /* Convert back to IDX_TYPE only now, so that any truncation happens after the offsets have been applied. */
1728 | idx = fold_convert (idx_type, idx); | |
1729 | ||
3097760b | 1730 | /* We don't want to construct an access past the array bounds. For example
1fd89060 RG |
1731 | char *(c[4]); |
1732 | c[3][2]; | |
1733 | should not be simplified into (*c)[14] or tree-vrp will | |
1734 | give false warnings. The same is true for | |
1735 | struct A { long x; char d[0]; } *a; | |
1736 | (char *)a - 4; | |
1737 | which should not be folded to &a->d[-8]. */
1738 | if (domain_type | |
1739 | && TYPE_MAX_VALUE (domain_type) | |
3097760b JH |
1740 | && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST) |
1741 | { | |
1742 | tree up_bound = TYPE_MAX_VALUE (domain_type); | |
1743 | ||
1744 | if (tree_int_cst_lt (up_bound, idx) | |
1745 | /* Accesses after the end of arrays of size 0 (gcc | |
1746 | extension) and 1 are likely intentional ("struct | |
1747 | hack"). */ | |
1748 | && compare_tree_int (up_bound, 1) > 0) | |
1749 | return NULL_TREE; | |
1750 | } | |
1fd89060 RG |
1751 | if (domain_type |
1752 | && TYPE_MIN_VALUE (domain_type)) | |
1753 | { | |
1754 | if (!allow_negative_idx | |
1755 | && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST | |
1756 | && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type))) | |
1757 | return NULL_TREE; | |
1758 | } | |
1759 | else if (!allow_negative_idx | |
1760 | && compare_tree_int (idx, 0) < 0) | |
1761 | return NULL_TREE; | |
3097760b | 1762 | |
04d86531 | 1763 | return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE); |
6de9cd9a DN |
1764 | } |
1765 | ||
750628d8 | 1766 | |
fe9821b8 | 1767 | /* Attempt to fold *(S+O) to S.X. |
6de9cd9a DN |
1768 | RECORD_TYPE is the type of S; BASE is S itself, or a pointer to it when
1769 | BASE_IS_PTR. OFFSET is a byte displacement. ORIG_TYPE is the desired result type. */
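/* A worked instance on a hypothetical struct:

     struct S { int a; int b; } s;
     ... = *(int *)((char *) &s + 4);

   With 4-byte int the displacement 4 lands exactly on field b, so the
   loop below folds the access to the COMPONENT_REF s.b.  */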
6de9cd9a | 1770 | |
fe9821b8 | 1771 | static tree |
6de9cd9a DN |
1772 | maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset, |
1773 | tree orig_type, bool base_is_ptr) | |
1774 | { | |
f34fa464 | 1775 | tree f, t, field_type, tail_array_field, field_offset; |
6e6e19cb JH |
1776 | tree ret; |
1777 | tree new_base; | |
6de9cd9a DN |
1778 | |
1779 | if (TREE_CODE (record_type) != RECORD_TYPE | |
1780 | && TREE_CODE (record_type) != UNION_TYPE | |
1781 | && TREE_CODE (record_type) != QUAL_UNION_TYPE) | |
1782 | return NULL_TREE; | |
1783 | ||
1784 | /* Short-circuit silly cases. */ | |
f4088621 | 1785 | if (useless_type_conversion_p (record_type, orig_type)) |
6de9cd9a DN |
1786 | return NULL_TREE; |
1787 | ||
1788 | tail_array_field = NULL_TREE; | |
1789 | for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f)) | |
1790 | { | |
1791 | int cmp; | |
1792 | ||
1793 | if (TREE_CODE (f) != FIELD_DECL) | |
1794 | continue; | |
1795 | if (DECL_BIT_FIELD (f)) | |
1796 | continue; | |
f34fa464 | 1797 | |
fe9821b8 JH |
1798 | if (!DECL_FIELD_OFFSET (f)) |
1799 | continue; | |
f34fa464 ZD |
1800 | field_offset = byte_position (f); |
1801 | if (TREE_CODE (field_offset) != INTEGER_CST) | |
6de9cd9a DN |
1802 | continue; |
1803 | ||
1804 | /* ??? Java creates "interesting" fields for representing base classes. | |
1805 | They have no name, and have no context. With no context, we get into | |
1806 | trouble with nonoverlapping_component_refs_p. Skip them. */ | |
1807 | if (!DECL_FIELD_CONTEXT (f)) | |
1808 | continue; | |
1809 | ||
1810 | /* The previous array field isn't at the end. */ | |
1811 | tail_array_field = NULL_TREE; | |
1812 | ||
1813 | /* Check to see if this offset overlaps with the field. */ | |
f34fa464 | 1814 | cmp = tree_int_cst_compare (field_offset, offset); |
6de9cd9a DN |
1815 | if (cmp > 0) |
1816 | continue; | |
1817 | ||
1818 | field_type = TREE_TYPE (f); | |
6de9cd9a DN |
1819 | |
1820 | /* Here we exactly match the offset being checked. If the types match, | |
1821 | then we can return that field. */ | |
53dba802 | 1822 | if (cmp == 0 |
f4088621 | 1823 | && useless_type_conversion_p (orig_type, field_type)) |
6de9cd9a DN |
1824 | { |
1825 | if (base_is_ptr) | |
1826 | base = build1 (INDIRECT_REF, record_type, base); | |
b4257cfc | 1827 | t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); |
6de9cd9a DN |
1828 | return t; |
1829 | } | |
53dba802 ZD |
1830 | |
1831 | /* Don't care about offsets into the middle of scalars. */ | |
1832 | if (!AGGREGATE_TYPE_P (field_type)) | |
1833 | continue; | |
6de9cd9a | 1834 | |
53dba802 ZD |
1835 | /* Check for an array at the end of the struct. This is often
1836 | used for flexible array members. We should be able to
1837 | turn this into an array access anyway. */ | |
1838 | if (TREE_CODE (field_type) == ARRAY_TYPE) | |
1839 | tail_array_field = f; | |
1840 | ||
1841 | /* Check the end of the field against the offset. */ | |
1842 | if (!DECL_SIZE_UNIT (f) | |
1843 | || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST) | |
1844 | continue; | |
1845 | t = int_const_binop (MINUS_EXPR, offset, field_offset, 1); | |
1846 | if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f))) | |
1847 | continue; | |
6de9cd9a | 1848 | |
53dba802 ZD |
1849 | /* If we matched, then set offset to the displacement into |
1850 | this field. */ | |
6e6e19cb JH |
1851 | if (base_is_ptr) |
1852 | new_base = build1 (INDIRECT_REF, record_type, base); | |
1853 | else | |
1854 | new_base = base; | |
1855 | new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE); | |
1856 | ||
1857 | /* Recurse to possibly find the match. */ | |
1fd89060 RG |
1858 | ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type, |
1859 | f == TYPE_FIELDS (record_type)); | |
6e6e19cb JH |
1860 | if (ret) |
1861 | return ret; | |
1862 | ret = maybe_fold_offset_to_component_ref (field_type, new_base, t, | |
1863 | orig_type, false); | |
1864 | if (ret) | |
1865 | return ret; | |
6de9cd9a DN |
1866 | } |
1867 | ||
1868 | if (!tail_array_field) | |
1869 | return NULL_TREE; | |
1870 | ||
1871 | f = tail_array_field; | |
1872 | field_type = TREE_TYPE (f); | |
53dba802 | 1873 | offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1); |
6de9cd9a | 1874 | |
6de9cd9a | 1875 | /* If we get here, we've got an aggregate field, and a possibly |
1ea7e6ad | 1876 | nonzero offset into it. Recurse and hope for a valid match. */
6de9cd9a DN |
1877 | if (base_is_ptr) |
1878 | base = build1 (INDIRECT_REF, record_type, base); | |
b4257cfc | 1879 | base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); |
6de9cd9a | 1880 | |
1fd89060 RG |
1881 | t = maybe_fold_offset_to_array_ref (base, offset, orig_type, |
1882 | f == TYPE_FIELDS (record_type)); | |
6de9cd9a DN |
1883 | if (t) |
1884 | return t; | |
1885 | return maybe_fold_offset_to_component_ref (field_type, base, offset, | |
1886 | orig_type, false); | |
1887 | } | |
1888 | ||
fe9821b8 JH |
1889 | /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type |
1890 | or BASE[index] or by combination of those. | |
1891 | ||
1892 | Before attempting the conversion strip off existing ADDR_EXPRs and | |
1893 | handled component refs. */ | |
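/* For illustration, the two helpers combine on a hypothetical type:

     struct T { int pad; int arr[4]; } t;
     ... = *(int *)((char *) &t + 12);

   maybe_fold_offset_to_component_ref peels off t.arr (displacement 4)
   and recurses into maybe_fold_offset_to_array_ref, which turns the
   remaining 8 bytes into index 2, yielding t.arr[2].  */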
1894 | ||
1895 | tree | |
1896 | maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type) | |
1897 | { | |
1898 | tree ret; | |
1899 | tree type; | |
1900 | bool base_is_ptr = true; | |
1901 | ||
1902 | STRIP_NOPS (base); | |
1903 | if (TREE_CODE (base) == ADDR_EXPR) | |
1904 | { | |
1905 | base_is_ptr = false; | |
1906 | ||
1907 | base = TREE_OPERAND (base, 0); | |
1908 | ||
1909 | /* Handle the case where an existing COMPONENT_REF picks, e.g., the
1910 | wrong field of a union, so it needs to be removed and a new
1911 | COMPONENT_REF constructed. Such wrong COMPONENT_REFs are often created
1912 | by folding the (type *)&object within the expression (type *)&object+offset. */
1913 | if (handled_component_p (base) && 0) | |
1914 | { | |
1915 | HOST_WIDE_INT sub_offset, size, maxsize; | |
1916 | tree newbase; | |
1917 | newbase = get_ref_base_and_extent (base, &sub_offset, | |
1918 | &size, &maxsize); | |
1919 | gcc_assert (newbase); | |
1920 | gcc_assert (!(sub_offset & (BITS_PER_UNIT - 1))); | |
1921 | if (size == maxsize) | |
1922 | { | |
1923 | base = newbase; | |
1924 | if (sub_offset) | |
1925 | offset = int_const_binop (PLUS_EXPR, offset, | |
1926 | build_int_cst (TREE_TYPE (offset), | |
1927 | sub_offset / BITS_PER_UNIT), 1); | |
1928 | } | |
1929 | } | |
f4088621 | 1930 | if (useless_type_conversion_p (orig_type, TREE_TYPE (base)) |
fe9821b8 JH |
1931 | && integer_zerop (offset)) |
1932 | return base; | |
1933 | type = TREE_TYPE (base); | |
1934 | } | |
1935 | else | |
1936 | { | |
1937 | base_is_ptr = true; | |
1938 | if (!POINTER_TYPE_P (TREE_TYPE (base))) | |
1939 | return NULL_TREE; | |
1940 | type = TREE_TYPE (TREE_TYPE (base)); | |
1941 | } | |
1942 | ret = maybe_fold_offset_to_component_ref (type, base, offset, | |
1943 | orig_type, base_is_ptr); | |
1944 | if (!ret) | |
1945 | { | |
1946 | if (base_is_ptr) | |
1947 | base = build1 (INDIRECT_REF, type, base); | |
1fd89060 | 1948 | ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true); |
fe9821b8 JH |
1949 | } |
1950 | return ret; | |
1951 | } | |
750628d8 | 1952 | |
6de9cd9a DN |
1953 | /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET). |
1954 | Return the simplified expression, or NULL if nothing could be done. */ | |
1955 | ||
1956 | static tree | |
1957 | maybe_fold_stmt_indirect (tree expr, tree base, tree offset) | |
1958 | { | |
1959 | tree t; | |
cbf8d355 | 1960 | bool volatile_p = TREE_THIS_VOLATILE (expr); |
6de9cd9a DN |
1961 | |
1962 | /* We may well have constructed a doubly nested PLUS_EXPR via multiple
1963 | substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that | |
1964 | are sometimes added. */ | |
1965 | base = fold (base); | |
ff7c8597 | 1966 | STRIP_TYPE_NOPS (base); |
6de9cd9a DN |
1967 | TREE_OPERAND (expr, 0) = base; |
1968 | ||
1969 | /* One possibility is that the address reduces to a string constant. */ | |
1970 | t = fold_read_from_constant_string (expr); | |
1971 | if (t) | |
1972 | return t; | |
1973 | ||
5be014d5 AP |
1974 | /* Add in any offset from a POINTER_PLUS_EXPR. */ |
1975 | if (TREE_CODE (base) == POINTER_PLUS_EXPR) | |
6de9cd9a DN |
1976 | { |
1977 | tree offset2; | |
1978 | ||
1979 | offset2 = TREE_OPERAND (base, 1); | |
1980 | if (TREE_CODE (offset2) != INTEGER_CST) | |
1981 | return NULL_TREE; | |
1982 | base = TREE_OPERAND (base, 0); | |
1983 | ||
5be014d5 AP |
1984 | offset = fold_convert (sizetype, |
1985 | int_const_binop (PLUS_EXPR, offset, offset2, 1)); | |
6de9cd9a DN |
1986 | } |
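  /* E.g., if this routine is entered with BASE being the hypothetical
     pointer expression "p_1 p+ 4" and OFFSET 8, the branch above strips
     the POINTER_PLUS_EXPR so that BASE becomes p_1 and OFFSET becomes
     12 before the ADDR_EXPR cases below are tried.  */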
1987 | ||
1988 | if (TREE_CODE (base) == ADDR_EXPR) | |
1989 | { | |
fe9821b8 JH |
1990 | tree base_addr = base; |
1991 | ||
6de9cd9a DN |
1992 | /* Strip the ADDR_EXPR. */ |
1993 | base = TREE_OPERAND (base, 0); | |
1994 | ||
0534fa56 RH |
1995 | /* Fold away CONST_DECL to its value, if the type is scalar. */ |
1996 | if (TREE_CODE (base) == CONST_DECL | |
1b738915 | 1997 | && is_gimple_min_invariant (DECL_INITIAL (base))) |
0534fa56 RH |
1998 | return DECL_INITIAL (base); |
1999 | ||
6de9cd9a | 2000 | /* Try folding *(&B+O) to B.X. */ |
fe9821b8 JH |
2001 | t = maybe_fold_offset_to_reference (base_addr, offset, |
2002 | TREE_TYPE (expr)); | |
6de9cd9a | 2003 | if (t) |
cbf8d355 RG |
2004 | { |
2005 | TREE_THIS_VOLATILE (t) = volatile_p; | |
2006 | return t; | |
2007 | } | |
6de9cd9a DN |
2008 | } |
2009 | else | |
2010 | { | |
2011 | /* We can get here for out-of-range string constant accesses, | |
2012 | such as "_"[3]. Bail out of the entire substitution search | |
2013 | and arrange for the entire statement to be replaced by a | |
aabcd309 | 2014 | call to __builtin_trap. In all likelihood this will all be |
6de9cd9a DN |
2015 | constant-folded away, but in the meantime we can't leave with |
2016 | something that get_expr_operands can't understand. */ | |
2017 | ||
2018 | t = base; | |
2019 | STRIP_NOPS (t); | |
2020 | if (TREE_CODE (t) == ADDR_EXPR | |
2021 | && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST) | |
2022 | { | |
2023 | /* FIXME: Except that this causes problems elsewhere with dead | |
0e61db61 | 2024 | code not being deleted, and we die in the rtl expanders |
6de9cd9a DN |
2025 | because we failed to remove some ssa_name. In the meantime, |
2026 | just return zero. */ | |
2027 | /* FIXME2: This condition should be signaled by | |
2028 | fold_read_from_constant_string directly, rather than | |
2029 | re-checking for it here. */ | |
2030 | return integer_zero_node; | |
2031 | } | |
2032 | ||
2033 | /* Try folding *(B+O) to B->X. Still an improvement. */ | |
2034 | if (POINTER_TYPE_P (TREE_TYPE (base))) | |
2035 | { | |
fe9821b8 JH |
2036 | t = maybe_fold_offset_to_reference (base, offset, |
2037 | TREE_TYPE (expr)); | |
6de9cd9a DN |
2038 | if (t) |
2039 | return t; | |
2040 | } | |
2041 | } | |
2042 | ||
2043 | /* Otherwise we had an offset that we could not simplify. */ | |
2044 | return NULL_TREE; | |
2045 | } | |
2046 | ||
750628d8 | 2047 | |
726a989a | 2048 | /* A quaint feature extant in our address arithmetic is that there |
6de9cd9a DN |
2049 | can be hidden type changes here. The type of the result need |
2050 | not be the same as the type of the input pointer. | |
2051 | ||
2052 | What we're after here is an expression of the form | |
2053 | (T *)(&array + const) | |
726a989a RB |
2054 | where array is OP0, const is OP1, RES_TYPE is T and |
2055 | the cast doesn't actually exist, but is implicit in the | |
5be014d5 | 2056 | type of the POINTER_PLUS_EXPR. We'd like to turn this into |
6de9cd9a DN |
2057 | &array[x] |
2058 | which may be able to propagate further. */ | |
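/* A hypothetical instance of the pattern:

     char buf[16];
     ... (char *) &buf + 5 ...

   OP0 is &buf, OP1 is 5 and RES_TYPE is char *, and the code below
   rewrites the POINTER_PLUS_EXPR into &buf[5], which may then
   propagate into later dereferences.  */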
2059 | ||
726a989a RB |
2060 | tree |
2061 | maybe_fold_stmt_addition (tree res_type, tree op0, tree op1) | |
6de9cd9a | 2062 | { |
6de9cd9a DN |
2063 | tree ptd_type; |
2064 | tree t; | |
6de9cd9a | 2065 | |
6de9cd9a DN |
2066 | /* It had better be a constant. */ |
2067 | if (TREE_CODE (op1) != INTEGER_CST) | |
2068 | return NULL_TREE; | |
2069 | /* The first operand should be an ADDR_EXPR. */ | |
2070 | if (TREE_CODE (op0) != ADDR_EXPR) | |
2071 | return NULL_TREE; | |
2072 | op0 = TREE_OPERAND (op0, 0); | |
2073 | ||
2074 | /* If the first operand is an ARRAY_REF, expand it so that we can fold | |
2075 | the offset into it. */ | |
2076 | while (TREE_CODE (op0) == ARRAY_REF) | |
2077 | { | |
2078 | tree array_obj = TREE_OPERAND (op0, 0); | |
2079 | tree array_idx = TREE_OPERAND (op0, 1); | |
2080 | tree elt_type = TREE_TYPE (op0); | |
2081 | tree elt_size = TYPE_SIZE_UNIT (elt_type); | |
2082 | tree min_idx; | |
2083 | ||
2084 | if (TREE_CODE (array_idx) != INTEGER_CST) | |
2085 | break; | |
2086 | if (TREE_CODE (elt_size) != INTEGER_CST) | |
2087 | break; | |
2088 | ||
2089 | /* Un-bias the index by the min index of the array type. */ | |
2090 | min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj)); | |
2091 | if (min_idx) | |
2092 | { | |
2093 | min_idx = TYPE_MIN_VALUE (min_idx); | |
2094 | if (min_idx) | |
2095 | { | |
44de5aeb RK |
2096 | if (TREE_CODE (min_idx) != INTEGER_CST) |
2097 | break; | |
2098 | ||
b6f65e3c | 2099 | array_idx = fold_convert (TREE_TYPE (min_idx), array_idx); |
6de9cd9a DN |
2100 | if (!integer_zerop (min_idx)) |
2101 | array_idx = int_const_binop (MINUS_EXPR, array_idx, | |
2102 | min_idx, 0); | |
2103 | } | |
2104 | } | |
2105 | ||
2106 | /* Convert the index to a byte offset. */ | |
b6f65e3c | 2107 | array_idx = fold_convert (sizetype, array_idx); |
6de9cd9a DN |
2108 | array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0); |
2109 | ||
2110 | /* Update the operands for the next round, or for folding. */ | |
5be014d5 | 2111 | op1 = int_const_binop (PLUS_EXPR, |
6de9cd9a | 2112 | array_idx, op1, 0); |
6de9cd9a DN |
2113 | op0 = array_obj; |
2114 | } | |
2115 | ||
726a989a | 2116 | ptd_type = TREE_TYPE (res_type); |
c4e5b5a8 RG |
2117 | /* If we want a pointer to void, reconstruct the reference from the |
2118 | array element type. A pointer to that can be trivially converted | |
2119 | to void *. This happens as we fold (void *)(ptr p+ off). */ | |
2120 | if (VOID_TYPE_P (ptd_type) | |
2121 | && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE) | |
2122 | ptd_type = TREE_TYPE (TREE_TYPE (op0)); | |
6de9cd9a DN |
2123 | |
2124 | /* At which point we can try some of the same things as for indirects. */ | |
1fd89060 | 2125 | t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true); |
6de9cd9a DN |
2126 | if (!t) |
2127 | t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1, | |
2128 | ptd_type, false); | |
2129 | if (t) | |
726a989a | 2130 | t = build1 (ADDR_EXPR, res_type, t); |
6de9cd9a DN |
2131 | |
2132 | return t; | |
2133 | } | |
2134 | ||
622f91ba JL |
2135 | /* For passing state through walk_tree into fold_stmt_r and its |
2136 | children. */ | |
2137 | ||
2138 | struct fold_stmt_r_data | |
2139 | { | |
726a989a | 2140 | gimple stmt; |
6ac01510 ILT |
2141 | bool *changed_p; |
2142 | bool *inside_addr_expr_p; | |
622f91ba JL |
2143 | }; |
2144 | ||
6de9cd9a DN |
2145 | /* Subroutine of fold_stmt called via walk_tree. We perform several |
2146 | simplifications of EXPR_P, mostly having to do with pointer arithmetic. */ | |
2147 | ||
2148 | static tree | |
2149 | fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data) | |
2150 | { | |
726a989a RB |
2151 | struct walk_stmt_info *wi = (struct walk_stmt_info *) data; |
2152 | struct fold_stmt_r_data *fold_stmt_r_data; | |
2153 | bool *inside_addr_expr_p; | |
2154 | bool *changed_p; | |
6de9cd9a | 2155 | tree expr = *expr_p, t; |
6aaa4013 | 2156 | bool volatile_p = TREE_THIS_VOLATILE (expr); |
6de9cd9a | 2157 | |
726a989a RB |
2158 | fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info; |
2159 | inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p; | |
2160 | changed_p = fold_stmt_r_data->changed_p; | |
2161 | ||
6de9cd9a DN |
2162 | /* ??? It'd be nice if walk_tree had a pre-order option. */ |
2163 | switch (TREE_CODE (expr)) | |
2164 | { | |
2165 | case INDIRECT_REF: | |
2166 | t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); | |
2167 | if (t) | |
2168 | return t; | |
2169 | *walk_subtrees = 0; | |
2170 | ||
2171 | t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0), | |
2172 | integer_zero_node); | |
16ac8575 RG |
2173 | if (!t |
2174 | && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR) | |
2175 | /* If we had a good reason for propagating the address here, | |
2176 | make sure we end up with valid gimple. See PR34989. */ | |
2177 | t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0); | |
6de9cd9a DN |
2178 | break; |
2179 | ||
fe9821b8 JH |
2180 | case NOP_EXPR: |
2181 | t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); | |
2182 | if (t) | |
2183 | return t; | |
2184 | *walk_subtrees = 0; | |
2185 | ||
2186 | if (POINTER_TYPE_P (TREE_TYPE (expr)) | |
2187 | && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0))) | |
2188 | && (t = maybe_fold_offset_to_reference | |
2189 | (TREE_OPERAND (expr, 0), | |
2190 | integer_zero_node, | |
2191 | TREE_TYPE (TREE_TYPE (expr))))) | |
fa2050d2 RG |
2192 | { |
2193 | tree ptr_type = build_pointer_type (TREE_TYPE (t)); | |
2194 | if (!useless_type_conversion_p (TREE_TYPE (expr), ptr_type)) | |
2195 | return NULL_TREE; | |
2196 | t = build_fold_addr_expr_with_type (t, ptr_type); | |
2197 | } | |
fe9821b8 JH |
2198 | break; |
2199 | ||
622f91ba | 2200 | /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF. |
6de9cd9a DN |
2201 | We'd only want to bother decomposing an existing ARRAY_REF if |
2202 | the base array is found to have another offset contained within. | |
2203 | Otherwise we'd be wasting time. */ | |
622f91ba JL |
2204 | case ARRAY_REF: |
2205 | /* If we are not processing expressions found within an | |
2206 | ADDR_EXPR, then we can fold constant array references. */ | |
2207 | if (!*inside_addr_expr_p) | |
2208 | t = fold_read_from_constant_string (expr); | |
2209 | else | |
2210 | t = NULL; | |
2211 | break; | |
6de9cd9a DN |
2212 | |
2213 | case ADDR_EXPR: | |
622f91ba | 2214 | *inside_addr_expr_p = true; |
6de9cd9a | 2215 | t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); |
622f91ba | 2216 | *inside_addr_expr_p = false; |
6de9cd9a DN |
2217 | if (t) |
2218 | return t; | |
2219 | *walk_subtrees = 0; | |
2220 | ||
51eed280 PB |
2221 | /* Make sure the value is properly considered constant, and so gets |
2222 | propagated as expected. */ | |
6de9cd9a | 2223 | if (*changed_p) |
127203ac | 2224 | recompute_tree_invariant_for_addr_expr (expr); |
6de9cd9a DN |
2225 | return NULL_TREE; |
2226 | ||
6de9cd9a DN |
2227 | case COMPONENT_REF: |
2228 | t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); | |
2229 | if (t) | |
2230 | return t; | |
2231 | *walk_subtrees = 0; | |
2232 | ||
fa27426e RH |
2233 | /* Make sure the FIELD_DECL is actually a field in the type on the lhs. |
2234 | We've already checked that the records are compatible, so we should | |
2235 | come up with a set of compatible fields. */ | |
2236 | { | |
2237 | tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0)); | |
2238 | tree expr_field = TREE_OPERAND (expr, 1); | |
2239 | ||
2240 | if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record)) | |
2241 | { | |
2242 | expr_field = find_compatible_field (expr_record, expr_field); | |
2243 | TREE_OPERAND (expr, 1) = expr_field; | |
2244 | } | |
2245 | } | |
6de9cd9a DN |
2246 | break; |
2247 | ||
ac182688 ZD |
2248 | case TARGET_MEM_REF: |
2249 | t = maybe_fold_tmr (expr); | |
2250 | break; | |
2251 | ||
726a989a RB |
2252 | case POINTER_PLUS_EXPR: |
2253 | t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); | |
2254 | if (t) | |
2255 | return t; | |
2256 | t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL); | |
2257 | if (t) | |
2258 | return t; | |
2259 | *walk_subtrees = 0; | |
2260 | ||
2261 | t = maybe_fold_stmt_addition (TREE_TYPE (expr), | |
2262 | TREE_OPERAND (expr, 0), | |
2263 | TREE_OPERAND (expr, 1)); | |
2264 | break; | |
2265 | ||
f393e7f5 RG |
2266 | case COND_EXPR: |
2267 | if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0))) | |
2268 | { | |
2269 | tree op0 = TREE_OPERAND (expr, 0); | |
6ac01510 ILT |
2270 | tree tem; |
2271 | bool set; | |
2272 | ||
2273 | fold_defer_overflow_warnings (); | |
2274 | tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0), | |
2275 | TREE_OPERAND (op0, 0), | |
2276 | TREE_OPERAND (op0, 1)); | |
726a989a RB |
2277 | /* This is actually a conditional expression, not a GIMPLE |
2278 | conditional statement; however, the valid_gimple_rhs_p
2279 | test still applies. */ | |
2280 | set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem); | |
6ac01510 ILT |
2281 | fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0); |
2282 | if (set) | |
5cdc4a26 | 2283 | { |
726a989a RB |
2284 | COND_EXPR_COND (expr) = tem; |
2285 | t = expr; | |
5cdc4a26 RS |
2286 | break; |
2287 | } | |
f393e7f5 | 2288 | } |
5cdc4a26 | 2289 | return NULL_TREE; |
f393e7f5 | 2290 | |
6de9cd9a DN |
2291 | default: |
2292 | return NULL_TREE; | |
2293 | } | |
2294 | ||
2295 | if (t) | |
2296 | { | |
6aaa4013 RG |
2297 | /* Preserve volatileness of the original expression. */ |
2298 | TREE_THIS_VOLATILE (t) = volatile_p; | |
6de9cd9a DN |
2299 | *expr_p = t; |
2300 | *changed_p = true; | |
2301 | } | |
2302 | ||
2303 | return NULL_TREE; | |
2304 | } | |
2305 | ||
10a0d495 JJ |
2306 | /* Return the string length, maximum string length or maximum value of |
2307 | ARG in LENGTH. | |
2308 | If ARG is an SSA name variable, follow its use-def chains. Return
2309 | false if LENGTH is not NULL and, for TYPE == 0, its value is not
2310 | equal to the length we determine, or if we are unable to determine
2311 | the length or value at all. VISITED is a bitmap of visited variables.
2312 | TYPE is 0 if string length should be returned, 1 for maximum string | |
2313 | length and 2 for maximum value ARG can have. */ | |
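/* Hypothetical examples of the three modes: for ARG == "abc" and
   TYPE == 0 the exact length 3 is stored in *LENGTH; for a PHI of
   "ab" and "wxyz" and TYPE == 1 the maximum length 4 is stored; and
   for TYPE == 2 a nonnegative INTEGER_CST ARG (such as a length
   argument of the *_chk builtins) is recorded directly.  */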
6de9cd9a | 2314 | |
06a9b53f | 2315 | static bool |
10a0d495 | 2316 | get_maxval_strlen (tree arg, tree *length, bitmap visited, int type) |
6de9cd9a | 2317 | { |
726a989a RB |
2318 | tree var, val; |
2319 | gimple def_stmt; | |
750628d8 DN |
2320 | |
2321 | if (TREE_CODE (arg) != SSA_NAME) | |
06a9b53f | 2322 | { |
f255541f RC |
2323 | if (TREE_CODE (arg) == COND_EXPR) |
2324 | return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type) | |
2325 | && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type); | |
c4e5b5a8 RG |
2326 | /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */ |
2327 | else if (TREE_CODE (arg) == ADDR_EXPR | |
2328 | && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF | |
2329 | && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1))) | |
2330 | { | |
2331 | tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0); | |
2332 | if (TREE_CODE (aop0) == INDIRECT_REF | |
2333 | && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME) | |
2334 | return get_maxval_strlen (TREE_OPERAND (aop0, 0), | |
2335 | length, visited, type); | |
2336 | } | |
f255541f | 2337 | |
10a0d495 JJ |
2338 | if (type == 2) |
2339 | { | |
2340 | val = arg; | |
2341 | if (TREE_CODE (val) != INTEGER_CST | |
2342 | || tree_int_cst_sgn (val) < 0) | |
2343 | return false; | |
2344 | } | |
2345 | else | |
2346 | val = c_strlen (arg, 1); | |
750628d8 | 2347 | if (!val) |
06a9b53f | 2348 | return false; |
cd709752 | 2349 | |
10a0d495 JJ |
2350 | if (*length) |
2351 | { | |
2352 | if (type > 0) | |
2353 | { | |
2354 | if (TREE_CODE (*length) != INTEGER_CST | |
2355 | || TREE_CODE (val) != INTEGER_CST) | |
2356 | return false; | |
2357 | ||
2358 | if (tree_int_cst_lt (*length, val)) | |
2359 | *length = val; | |
2360 | return true; | |
2361 | } | |
2362 | else if (simple_cst_equal (val, *length) != 1) | |
2363 | return false; | |
2364 | } | |
6de9cd9a | 2365 | |
750628d8 DN |
2366 | *length = val; |
2367 | return true; | |
6de9cd9a | 2368 | } |
06a9b53f | 2369 | |
750628d8 DN |
2370 | /* If we were already here, break the infinite cycle. */ |
2371 | if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg))) | |
2372 | return true; | |
2373 | bitmap_set_bit (visited, SSA_NAME_VERSION (arg)); | |
2374 | ||
2375 | var = arg; | |
2376 | def_stmt = SSA_NAME_DEF_STMT (var); | |
6de9cd9a | 2377 | |
726a989a RB |
2378 | switch (gimple_code (def_stmt)) |
2379 | { | |
2380 | case GIMPLE_ASSIGN: | |
2381 | /* The RHS of the statement defining VAR must either have a | |
2382 | constant length or come from another SSA_NAME with a constant | |
2383 | length. */ | |
2384 | if (gimple_assign_single_p (def_stmt) | |
2385 | || gimple_assign_unary_nop_p (def_stmt)) | |
2386 | { | |
2387 | tree rhs = gimple_assign_rhs1 (def_stmt); | |
2388 | return get_maxval_strlen (rhs, length, visited, type); | |
2389 | } | |
2390 | return false; | |
2391 | ||
2392 | case GIMPLE_PHI: | |
750628d8 DN |
2393 | { |
2394 | /* All the arguments of the PHI node must have the same constant | |
2395 | length. */ | |
726a989a RB |
2396 | unsigned i; |
2397 | ||
2398 | for (i = 0; i < gimple_phi_num_args (def_stmt); i++) | |
2399 | { | |
2400 | tree arg = gimple_phi_arg (def_stmt, i)->def; | |
2401 | ||
2402 | /* If this PHI has itself as an argument, we cannot | |
2403 | determine the string length of this argument. However, | |
2404 | if we can find a constant string length for the other | |
2405 | PHI args then we can still be sure that this is a | |
2406 | constant string length. So be optimistic and just | |
2407 | continue with the next argument. */ | |
2408 | if (arg == gimple_phi_result (def_stmt)) | |
2409 | continue; | |
2410 | ||
2411 | if (!get_maxval_strlen (arg, length, visited, type)) | |
2412 | return false; | |
2413 | } | |
2414 | } | |
2415 | return true; | |
6de9cd9a | 2416 | |
750628d8 | 2417 | default: |
726a989a | 2418 | return false; |
6de9cd9a | 2419 | } |
6de9cd9a DN |
2420 | } |
2421 | ||
2422 | ||
726a989a RB |
2423 | /* Fold a builtin call in statement STMT. Return a simplified tree.
2424 | We may return a non-constant expression, including another call
2425 | to a different function with different arguments, e.g.,
2426 | substituting memcpy for strcpy when the string length is known. | |
2427 | Note that some builtins expand into inline code that may not | |
2428 | be valid in GIMPLE. Callers must take care. */ | |
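/* One folding this enables, under the hypothetical assumption that the
   source length is known: "strcpy (dst, src)" where src has length 3
   can become "memcpy (dst, src, 4)" via fold_builtin_strcpy below, a
   call to a different callee with an extra argument, which is why the
   caller may need to rebuild the call statement.  */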
6de9cd9a DN |
2429 | |
2430 | static tree | |
726a989a | 2431 | ccp_fold_builtin (gimple stmt) |
6de9cd9a | 2432 | { |
10a0d495 | 2433 | tree result, val[3]; |
5039610b | 2434 | tree callee, a; |
10a0d495 | 2435 | int arg_mask, i, type; |
a32e70c3 RS |
2436 | bitmap visited; |
2437 | bool ignore; | |
5039610b | 2438 | int nargs; |
6de9cd9a | 2439 | |
726a989a RB |
2440 | gcc_assert (is_gimple_call (stmt)); |
2441 | ||
2442 | ignore = (gimple_call_lhs (stmt) == NULL); | |
6de9cd9a DN |
2443 | |
2444 | /* First try the generic builtin folder. If that succeeds, return the | |
2445 | result directly. */ | |
726a989a | 2446 | result = fold_call_stmt (stmt, ignore); |
6de9cd9a | 2447 | if (result) |
10a0d495 JJ |
2448 | { |
2449 | if (ignore) | |
2450 | STRIP_NOPS (result); | |
2451 | return result; | |
2452 | } | |
a32e70c3 RS |
2453 | |
2454 | /* Ignore MD builtins. */ | |
726a989a | 2455 | callee = gimple_call_fndecl (stmt); |
a32e70c3 RS |
2456 | if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD) |
2457 | return NULL_TREE; | |
6de9cd9a DN |
2458 | |
2459 | /* If the builtin could not be folded, and it has no argument list, | |
2460 | we're done. */ | |
726a989a | 2461 | nargs = gimple_call_num_args (stmt); |
5039610b | 2462 | if (nargs == 0) |
6de9cd9a DN |
2463 | return NULL_TREE; |
2464 | ||
2465 | /* Limit the work only for builtins we know how to simplify. */ | |
2466 | switch (DECL_FUNCTION_CODE (callee)) | |
2467 | { | |
2468 | case BUILT_IN_STRLEN: | |
2469 | case BUILT_IN_FPUTS: | |
2470 | case BUILT_IN_FPUTS_UNLOCKED: | |
10a0d495 JJ |
2471 | arg_mask = 1; |
2472 | type = 0; | |
6de9cd9a DN |
2473 | break; |
2474 | case BUILT_IN_STRCPY: | |
2475 | case BUILT_IN_STRNCPY: | |
10a0d495 JJ |
2476 | arg_mask = 2; |
2477 | type = 0; | |
2478 | break; | |
2479 | case BUILT_IN_MEMCPY_CHK: | |
2480 | case BUILT_IN_MEMPCPY_CHK: | |
2481 | case BUILT_IN_MEMMOVE_CHK: | |
2482 | case BUILT_IN_MEMSET_CHK: | |
2483 | case BUILT_IN_STRNCPY_CHK: | |
2484 | arg_mask = 4; | |
2485 | type = 2; | |
2486 | break; | |
2487 | case BUILT_IN_STRCPY_CHK: | |
2488 | case BUILT_IN_STPCPY_CHK: | |
2489 | arg_mask = 2; | |
2490 | type = 1; | |
2491 | break; | |
2492 | case BUILT_IN_SNPRINTF_CHK: | |
2493 | case BUILT_IN_VSNPRINTF_CHK: | |
2494 | arg_mask = 2; | |
2495 | type = 2; | |
6de9cd9a DN |
2496 | break; |
2497 | default: | |
2498 | return NULL_TREE; | |
2499 | } | |
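  /* Bit I of ARG_MASK selects call argument I for the strlen/value
     query below: e.g. ARG_MASK == 2 for strcpy analyzes argument 1,
     the source string, while ARG_MASK == 4 with TYPE == 2 for the
     memory *_chk variants analyzes argument 2, the copy length,
     asking for its maximum possible value.  */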
2500 | ||
2501 | /* Try to use the dataflow information gathered by the CCP process. */ | |
8bdbfff5 | 2502 | visited = BITMAP_ALLOC (NULL); |
6de9cd9a | 2503 | |
10a0d495 | 2504 | memset (val, 0, sizeof (val)); |
726a989a | 2505 | for (i = 0; i < nargs; i++) |
5039610b | 2506 | { |
726a989a RB |
2507 | if ((arg_mask >> i) & 1) |
2508 | { | |
2509 | a = gimple_call_arg (stmt, i); | |
2510 | bitmap_clear (visited); | |
2511 | if (!get_maxval_strlen (a, &val[i], visited, type)) | |
2512 | val[i] = NULL_TREE; | |
2513 | } | |
5039610b | 2514 | } |
6de9cd9a | 2515 | |
8bdbfff5 | 2516 | BITMAP_FREE (visited); |
6de9cd9a | 2517 | |
a32e70c3 | 2518 | result = NULL_TREE; |
6de9cd9a DN |
2519 | switch (DECL_FUNCTION_CODE (callee)) |
2520 | { | |
2521 | case BUILT_IN_STRLEN: | |
10a0d495 | 2522 | if (val[0]) |
6de9cd9a | 2523 | { |
726a989a RB |
2524 | tree new_val = |
2525 | fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]); | |
6de9cd9a DN |
2526 | |
2527 | /* If the result is not a valid gimple value, or not a cast | |
2528 | of a valid gimple value, then we cannot use the result. */
c22940cd TN |
2529 | if (is_gimple_val (new_val) |
2530 | || (is_gimple_cast (new_val) | |
2531 | && is_gimple_val (TREE_OPERAND (new_val, 0)))) | |
2532 | return new_val; | |
6de9cd9a | 2533 | } |
a32e70c3 RS |
2534 | break; |
2535 | ||
6de9cd9a | 2536 | case BUILT_IN_STRCPY: |
5039610b SL |
2537 | if (val[1] && is_gimple_val (val[1]) && nargs == 2) |
2538 | result = fold_builtin_strcpy (callee, | |
726a989a RB |
2539 | gimple_call_arg (stmt, 0), |
2540 | gimple_call_arg (stmt, 1), | |
5039610b | 2541 | val[1]); |
a32e70c3 RS |
2542 | break; |
2543 | ||
6de9cd9a | 2544 | case BUILT_IN_STRNCPY: |
5039610b SL |
2545 | if (val[1] && is_gimple_val (val[1]) && nargs == 3) |
2546 | result = fold_builtin_strncpy (callee, | |
726a989a RB |
2547 | gimple_call_arg (stmt, 0), |
2548 | gimple_call_arg (stmt, 1), | |
2549 | gimple_call_arg (stmt, 2), | |
5039610b | 2550 | val[1]); |
a32e70c3 RS |
2551 | break; |
2552 | ||
6de9cd9a | 2553 | case BUILT_IN_FPUTS: |
726a989a RB |
2554 | result = fold_builtin_fputs (gimple_call_arg (stmt, 0), |
2555 | gimple_call_arg (stmt, 1), | |
2556 | ignore, false, val[0]); | |
a32e70c3 RS |
2557 | break; |
2558 | ||
6de9cd9a | 2559 | case BUILT_IN_FPUTS_UNLOCKED: |
726a989a RB |
2560 | result = fold_builtin_fputs (gimple_call_arg (stmt, 0), |
2561 | gimple_call_arg (stmt, 1), | |
2562 | ignore, true, val[0]); | |
10a0d495 JJ |
2563 | break; |
2564 | ||
2565 | case BUILT_IN_MEMCPY_CHK: | |
2566 | case BUILT_IN_MEMPCPY_CHK: | |
2567 | case BUILT_IN_MEMMOVE_CHK: | |
2568 | case BUILT_IN_MEMSET_CHK: | |
2569 | if (val[2] && is_gimple_val (val[2])) | |
5039610b | 2570 | result = fold_builtin_memory_chk (callee, |
726a989a RB |
2571 | gimple_call_arg (stmt, 0), |
2572 | gimple_call_arg (stmt, 1), | |
2573 | gimple_call_arg (stmt, 2), | |
2574 | gimple_call_arg (stmt, 3), | |
5039610b | 2575 | val[2], ignore, |
10a0d495 JJ |
2576 | DECL_FUNCTION_CODE (callee)); |
2577 | break; | |
2578 | ||
2579 | case BUILT_IN_STRCPY_CHK: | |
2580 | case BUILT_IN_STPCPY_CHK: | |
2581 | if (val[1] && is_gimple_val (val[1])) | |
5039610b | 2582 | result = fold_builtin_stxcpy_chk (callee, |
726a989a RB |
2583 | gimple_call_arg (stmt, 0), |
2584 | gimple_call_arg (stmt, 1), | |
2585 | gimple_call_arg (stmt, 2), | |
5039610b | 2586 | val[1], ignore, |
10a0d495 JJ |
2587 | DECL_FUNCTION_CODE (callee)); |
2588 | break; | |
2589 | ||
2590 | case BUILT_IN_STRNCPY_CHK: | |
2591 | if (val[2] && is_gimple_val (val[2])) | |
726a989a RB |
2592 | result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0), |
2593 | gimple_call_arg (stmt, 1), | |
2594 | gimple_call_arg (stmt, 2), | |
2595 | gimple_call_arg (stmt, 3), | |
5039610b | 2596 | val[2]); |
10a0d495 JJ |
2597 | break; |
2598 | ||
2599 | case BUILT_IN_SNPRINTF_CHK: | |
2600 | case BUILT_IN_VSNPRINTF_CHK: | |
2601 | if (val[1] && is_gimple_val (val[1])) | |
726a989a RB |
2602 | result = gimple_fold_builtin_snprintf_chk (stmt, val[1], |
2603 | DECL_FUNCTION_CODE (callee)); | |
a32e70c3 | 2604 | break; |
6de9cd9a DN |
2605 | |
2606 | default: | |
1e128c5f | 2607 | gcc_unreachable (); |
6de9cd9a DN |
2608 | } |
2609 | ||
a32e70c3 | 2610 | if (result && ignore) |
9675412f | 2611 | result = fold_ignored_result (result); |
a32e70c3 | 2612 | return result; |
6de9cd9a DN |
2613 | } |
2614 | ||
726a989a RB |
2615 | /* Attempt to fold an assignment statement pointed-to by SI. Returns a |
2616 | replacement rhs for the statement or NULL_TREE if no simplification | |
2617 | could be made. It is assumed that the operands have been previously | |
2618 | folded. */ | |
2619 | ||
2620 | static tree | |
2621 | fold_gimple_assign (gimple_stmt_iterator *si) | |
2622 | { | |
2623 | gimple stmt = gsi_stmt (*si); | |
2624 | enum tree_code subcode = gimple_assign_rhs_code (stmt); | |
2625 | ||
2626 | tree result = NULL; | |
2627 | ||
2628 | switch (get_gimple_rhs_class (subcode)) | |
2629 | { | |
2630 | case GIMPLE_SINGLE_RHS: | |
2631 | { | |
2632 | tree rhs = gimple_assign_rhs1 (stmt); | |
2633 | ||
2634 | /* Try to fold a conditional expression. */ | |
2635 | if (TREE_CODE (rhs) == COND_EXPR) | |
2636 | { | |
2637 | tree temp = fold (COND_EXPR_COND (rhs)); | |
2638 | if (temp != COND_EXPR_COND (rhs)) | |
2639 | result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp, | |
2640 | COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs)); | |
2641 | } | |
2642 | ||
2643 | /* If we couldn't fold the RHS, hand over to the generic | |
2644 | fold routines. */ | |
2645 | if (result == NULL_TREE) | |
2646 | result = fold (rhs); | |
2647 | ||
2648 | /* Strip away useless type conversions. Both the NON_LVALUE_EXPR | |
2649 | that may have been added by fold, and "useless" type | |
2650 | conversions that might now be apparent due to propagation. */ | |
2651 | STRIP_USELESS_TYPE_CONVERSION (result); | |
2652 | ||
2653 | if (result != rhs && valid_gimple_rhs_p (result)) | |
2654 | return result; | |
2655 | else | |
2656 | /* It is possible that fold_stmt_r simplified the RHS. | |
2657 | Make sure that the subcode of this statement still | |
2658 | reflects the principal operator of the rhs operand. */ | |
2659 | return rhs; | |
2660 | } | |
2661 | break; | |
2662 | ||
2663 | case GIMPLE_UNARY_RHS: | |
2664 | result = fold_unary (subcode, | |
2665 | gimple_expr_type (stmt), | |
2666 | gimple_assign_rhs1 (stmt)); | |
2667 | ||
2668 | if (result) | |
2669 | { | |
2670 | STRIP_USELESS_TYPE_CONVERSION (result); | |
2671 | if (valid_gimple_rhs_p (result)) | |
2672 | return result; | |
2673 | } | |
2674 | else if ((gimple_assign_rhs_code (stmt) == NOP_EXPR | |
2675 | || gimple_assign_rhs_code (stmt) == CONVERT_EXPR) | |
2676 | && POINTER_TYPE_P (gimple_expr_type (stmt)) | |
2677 | && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt)))) | |
2678 | { | |
2679 | tree type = gimple_expr_type (stmt); | |
2680 | tree t = maybe_fold_offset_to_reference (gimple_assign_rhs1 (stmt), | |
2681 | integer_zero_node, | |
2682 | TREE_TYPE (type)); | |
2683 | if (t) | |
2684 | { | |
2685 | tree ptr_type = build_pointer_type (TREE_TYPE (t)); | |
2686 | if (useless_type_conversion_p (type, ptr_type)) | |
2687 | return build_fold_addr_expr_with_type (t, ptr_type); | |
2688 | } | |
2689 | } | |
2690 | break; | |
2691 | ||
2692 | case GIMPLE_BINARY_RHS: | |
2693 | /* Try to fold pointer addition. */ | |
2694 | if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR) | |
2695 | result = maybe_fold_stmt_addition ( | |
2696 | TREE_TYPE (gimple_assign_lhs (stmt)), | |
2697 | gimple_assign_rhs1 (stmt), | |
2698 | gimple_assign_rhs2 (stmt)); | |
2699 | ||
2700 | if (!result) | |
2701 | result = fold_binary (subcode, | |
2702 | TREE_TYPE (gimple_assign_lhs (stmt)), | |
2703 | gimple_assign_rhs1 (stmt), | |
2704 | gimple_assign_rhs2 (stmt)); | |
2705 | ||
2706 | if (result) | |
2707 | { | |
2708 | STRIP_USELESS_TYPE_CONVERSION (result); | |
2709 | if (valid_gimple_rhs_p (result)) | |
2710 | return result; | |
2711 | } | |
2712 | break; | |
2713 | ||
2714 | case GIMPLE_INVALID_RHS: | |
2715 | gcc_unreachable (); | |
2716 | } | |
2717 | ||
2718 | return NULL_TREE; | |
2719 | } | |
2720 | ||
2721 | /* Attempt to fold a conditional statement. Return true if any changes were | |
2722 | made. We only attempt to fold the condition expression, and do not perform | |
2723 | any transformation that would require alteration of the cfg. It is | |
2724 | assumed that the operands have been previously folded. */ | |
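/* E.g., a hypothetical "if (x_1 != x_1)" over an integral x_1 folds
   via fold_binary to a constant-false condition; the statement is
   rewritten in place and removal of the dead edge is left to a later
   CFG cleanup.  */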
2725 | ||
2726 | static bool | |
2727 | fold_gimple_cond (gimple stmt) | |
2728 | { | |
2729 | tree result = fold_binary (gimple_cond_code (stmt), | |
2730 | boolean_type_node, | |
2731 | gimple_cond_lhs (stmt), | |
2732 | gimple_cond_rhs (stmt)); | |
2733 | ||
2734 | if (result) | |
2735 | { | |
2736 | STRIP_USELESS_TYPE_CONVERSION (result); | |
2737 | if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result)) | |
2738 | { | |
2739 | gimple_cond_set_condition_from_tree (stmt, result); | |
2740 | return true; | |
2741 | } | |
2742 | } | |
2743 | ||
2744 | return false; | |
2745 | } | |
2746 | ||
2747 | ||
2748 | /* Attempt to fold a call statement referenced by the statement iterator GSI. | |
2749 | The statement may be replaced by another statement, e.g., if the call | |
2750 | simplifies to a constant value. Return true if any changes were made. | |
2751 | It is assumed that the operands have been previously folded. */ | |
2752 | ||
2753 | static bool | |
2754 | fold_gimple_call (gimple_stmt_iterator *gsi) | |
2755 | { | |
2756 | gimple stmt = gsi_stmt (*gsi); | |
2757 | ||
2758 | tree callee = gimple_call_fndecl (stmt); | |
2759 | ||
2760 | /* Check for builtins that CCP can handle using information not | |
2761 | available in the generic fold routines. */ | |
2762 | if (callee && DECL_BUILT_IN (callee)) | |
2763 | { | |
2764 | tree result = ccp_fold_builtin (stmt); | |
2765 | ||
2766 | if (result) | |
2767 | return update_call_from_tree (gsi, result); | |
2768 | } | |
2769 | else | |
2770 | { | |
2771 | /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve | |
2772 | here are when we've propagated the address of a decl into the | |
2773 | object slot. */ | |
2774 | /* ??? Should perhaps do this in fold proper. However, doing it | |
2775 | there requires that we create a new CALL_EXPR, and that requires | |
2776 | copying EH region info to the new node. Easier to just do it | |
2777 | here, where we can smash the call operand. */
2778 | /* ??? Is there a good reason not to do this in fold_stmt_inplace? */ | |
2779 | callee = gimple_call_fn (stmt); | |
2780 | if (TREE_CODE (callee) == OBJ_TYPE_REF | |
2781 | && lang_hooks.fold_obj_type_ref | |
2782 | && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR | |
2783 | && DECL_P (TREE_OPERAND | |
2784 | (OBJ_TYPE_REF_OBJECT (callee), 0))) | |
2785 | { | |
2786 | tree t; | |
2787 | ||
2788 | /* ??? Caution: Broken ADDR_EXPR semantics means that | |
2789 | looking at the type of the operand of the addr_expr | |
2790 | can yield an array type. See silly exception in | |
2791 | check_pointer_types_r. */ | |
2792 | t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee))); | |
2793 | t = lang_hooks.fold_obj_type_ref (callee, t); | |
2794 | if (t) | |
2795 | { | |
2796 | gimple_call_set_fn (stmt, t); | |
2797 | return true; | |
2798 | } | |
2799 | } | |
2800 | } | |
2801 | ||
2802 | return false; | |
2803 | } | |
6de9cd9a | 2804 | |
726a989a | 2805 | /* Fold the statement pointed to by GSI. In some cases, this function may |
750628d8 DN |
2806 | replace the whole statement with a new one. Returns true iff folding |
2807 | makes any changes. */ | |
6de9cd9a | 2808 | |
750628d8 | 2809 | bool |
726a989a | 2810 | fold_stmt (gimple_stmt_iterator *gsi) |
6de9cd9a | 2811 | { |
726a989a | 2812 | tree res; |
622f91ba | 2813 | struct fold_stmt_r_data fold_stmt_r_data; |
726a989a RB |
2814 | struct walk_stmt_info wi; |
2815 | ||
750628d8 | 2816 | bool changed = false; |
622f91ba JL |
2817 | bool inside_addr_expr = false; |
2818 | ||
726a989a | 2819 | gimple stmt = gsi_stmt (*gsi); |
6ac01510 ILT |
2820 | |
2821 | fold_stmt_r_data.stmt = stmt; | |
622f91ba JL |
2822 | fold_stmt_r_data.changed_p = &changed; |
2823 | fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr; | |
6de9cd9a | 2824 | |
726a989a RB |
2825 | memset (&wi, 0, sizeof (wi)); |
2826 | wi.info = &fold_stmt_r_data; | |
6de9cd9a | 2827 | |
726a989a RB |
2828 | /* Fold the individual operands. |
2829 | For example, fold instances of *&VAR into VAR, etc. */ | |
2830 | res = walk_gimple_op (stmt, fold_stmt_r, &wi); | |
2831 | gcc_assert (!res); | |
6de9cd9a | 2832 | |
726a989a RB |
2833 | /* Fold the main computation performed by the statement. */ |
2834 | switch (gimple_code (stmt)) | |
6de9cd9a | 2835 | { |
726a989a RB |
2836 | case GIMPLE_ASSIGN: |
2837 | { | |
2838 | tree new_rhs = fold_gimple_assign (gsi); | |
2839 | if (new_rhs != NULL_TREE) | |
2840 | { | |
2841 | gimple_assign_set_rhs_from_tree (gsi, new_rhs); | |
2842 | changed = true; | |
2843 | } | |
2844 | stmt = gsi_stmt (*gsi); | |
2845 | break; | |
2846 | } | |
2847 | case GIMPLE_COND: | |
2848 | changed |= fold_gimple_cond (stmt); | |
2849 | break; | |
2850 | case GIMPLE_CALL: | |
2851 | /* The entire statement may be replaced in this case. */ | |
2852 | changed |= fold_gimple_call (gsi); | |
2853 | break; | |
1809ff6b | 2854 | |
726a989a RB |
2855 | default: |
2856 | return changed; | |
2857 | break; | |
f255541f | 2858 | } |
6de9cd9a | 2859 | |
750628d8 | 2860 | return changed; |
6de9cd9a DN |
2861 | } |
2862 | ||
38965eb2 ZD |
2863 | /* Perform the minimal folding on statement STMT. Only operations like |
2864 | *&x created by constant propagation are handled. The statement cannot | |
726a989a RB |
2865 | be replaced with a new one. Return true if the statement was |
2866 | changed, false otherwise. */ | |
38965eb2 ZD |
2867 | |
2868 | bool | |
726a989a | 2869 | fold_stmt_inplace (gimple stmt) |
38965eb2 | 2870 | { |
726a989a | 2871 | tree res; |
622f91ba | 2872 | struct fold_stmt_r_data fold_stmt_r_data; |
726a989a RB |
2873 | struct walk_stmt_info wi; |
2874 | gimple_stmt_iterator si; | |
2875 | ||
38965eb2 | 2876 | bool changed = false; |
622f91ba JL |
2877 | bool inside_addr_expr = false; |
2878 | ||
6ac01510 | 2879 | fold_stmt_r_data.stmt = stmt; |
622f91ba JL |
2880 | fold_stmt_r_data.changed_p = &changed; |
2881 | fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr; | |
38965eb2 | 2882 | |
726a989a RB |
2883 | memset (&wi, 0, sizeof (wi)); |
2884 | wi.info = &fold_stmt_r_data; | |
2885 | ||
2886 | /* Fold the individual operands. | |
2887 | For example, fold instances of *&VAR into VAR, etc. | |
38965eb2 | 2888 | |
726a989a RB |
2889 | It appears that, at one time, maybe_fold_stmt_indirect |
2890 | would cause the walk to return non-null in order to | |
2891 | signal that the entire statement should be replaced with | |
2892 | a call to __builtin_trap. This functionality is currently | |
2893 | disabled, as noted in a FIXME, and cannot be supported here. */ | |
2894 | res = walk_gimple_op (stmt, fold_stmt_r, &wi); | |
2895 | gcc_assert (!res); | |
38965eb2 | 2896 | |
726a989a RB |
2897 | /* Fold the main computation performed by the statement. */ |
2898 | switch (gimple_code (stmt)) | |
2899 | { | |
2900 | case GIMPLE_ASSIGN: | |
2901 | { | |
2902 | unsigned old_num_ops; | |
2903 | tree new_rhs; | |
2904 | old_num_ops = gimple_num_ops (stmt); | |
2905 | si = gsi_for_stmt (stmt); | |
2906 | new_rhs = fold_gimple_assign (&si); | |
2907 | if (new_rhs != NULL_TREE | |
2908 | && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops) | |
2909 | { | |
2910 | gimple_assign_set_rhs_from_tree (&si, new_rhs); | |
2911 | changed = true; | |
2912 | } | |
2913 | gcc_assert (gsi_stmt (si) == stmt); | |
2914 | break; | |
2915 | } | |
2916 | case GIMPLE_COND: | |
2917 | changed |= fold_gimple_cond (stmt); | |
2918 | break; | |
38965eb2 | 2919 | |
726a989a RB |
2920 | default: |
2921 | break; | |
2922 | } | |
38965eb2 ZD |
2923 | |
2924 | return changed; | |
2925 | } | |
726a989a | 2926 | |
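/* Illustrative contrast (a hypothetical source-level analogy, not code
   from this file): fold_stmt_inplace is safe while other code still
   holds a pointer to STMT, because the statement object itself always
   survives.  The folds it performs look like the *&VAR simplification
   below:

     int x = 5;
     int *p = &x;    propagation substitutes &x for p ...
     int y = *p;     ... giving y = *&x, folded in place to y = x.
*/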
cb8e078d JJ |
2927 | /* Try to optimize out __builtin_stack_restore. Optimize it out |
2928 | if there is another __builtin_stack_restore in the same basic | |
2929 | block and no calls or ASM_EXPRs are in between, or if this block's | |
2930 | only outgoing edge is to EXIT_BLOCK and there are no calls or | |
2931 | ASM_EXPRs after this __builtin_stack_restore. */ | |
2932 | ||
2933 | static tree | |
726a989a | 2934 | optimize_stack_restore (gimple_stmt_iterator i) |
cb8e078d | 2935 | { |
726a989a RB |
2936 | tree callee, rhs; |
2937 | gimple stmt, stack_save; | |
2938 | gimple_stmt_iterator stack_save_gsi; | |
2939 | ||
2940 | basic_block bb = gsi_bb (i); | |
2941 | gimple call = gsi_stmt (i); | |
cb8e078d | 2942 | |
726a989a RB |
2943 | if (gimple_code (call) != GIMPLE_CALL |
2944 | || gimple_call_num_args (call) != 1 | |
2945 | || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME | |
2946 | || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0)))) | |
cb8e078d JJ |
2947 | return NULL_TREE; |
2948 | ||
726a989a | 2949 | for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i)) |
cb8e078d | 2950 | { |
726a989a RB |
2951 | stmt = gsi_stmt (i); |
2952 | if (gimple_code (stmt) == GIMPLE_ASM) | |
cb8e078d | 2953 | return NULL_TREE; |
726a989a | 2954 | if (gimple_code (stmt) != GIMPLE_CALL) |
cb8e078d JJ |
2955 | continue; |
2956 | ||
726a989a | 2957 | callee = gimple_call_fndecl (stmt); |
cb8e078d JJ |
2958 | if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL) |
2959 | return NULL_TREE; | |
2960 | ||
2961 | if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE) | |
2962 | break; | |
2963 | } | |
2964 | ||
726a989a | 2965 | if (gsi_end_p (i) |
cb8e078d JJ |
2966 | && (! single_succ_p (bb) |
2967 | || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)) | |
2968 | return NULL_TREE; | |
2969 | ||
726a989a RB |
2970 | stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0)); |
2971 | if (gimple_code (stack_save) != GIMPLE_CALL | |
2972 | || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0) | |
2973 | || stmt_could_throw_p (stack_save) | |
2974 | || !has_single_use (gimple_call_arg (call, 0))) | |
cb8e078d JJ |
2975 | return NULL_TREE; |
2976 | ||
726a989a | 2977 | callee = gimple_call_fndecl (stack_save); |
cb8e078d JJ |
2978 | if (!callee |
2979 | || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL | |
2980 | || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE | |
726a989a | 2981 | || gimple_call_num_args (stack_save) != 0) |
cb8e078d JJ |
2982 | return NULL_TREE; |
2983 | ||
726a989a RB |
2984 | stack_save_gsi = gsi_for_stmt (stack_save); |
2985 | push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi)); | |
2986 | rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0); | |
2987 | if (!update_call_from_tree (&stack_save_gsi, rhs)) | |
cb8e078d | 2988 | { |
726a989a | 2989 | discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi)); |
cb8e078d JJ |
2990 | return NULL_TREE; |
2991 | } | |
726a989a | 2992 | pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi)); |
cb8e078d | 2993 | |
726a989a | 2994 | /* No effect, so the statement will be deleted. */ |
cb8e078d JJ |
2995 | return integer_zero_node; |
2996 | } | |
726a989a | 2997 | |
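/* Source-level illustration (hypothetical example, not from this file):
   a block-local variable-length array makes gimplification emit a
   __builtin_stack_save / __builtin_stack_restore pair.  When only the
   function exit follows the restore, the optimization above replaces
   the save's result with constant 0 and deletes the restore.  */

extern void use (char *);

void
f (int n)
{
  {                  /* GIMPLE: t = __builtin_stack_save ();          */
    char buf[n];
    use (buf);
  }                  /* GIMPLE: __builtin_stack_restore (t); this is
			removable because the block's only outgoing
			edge reaches EXIT with no calls or asms after
			the restore.                                   */
}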
d7bd8aeb JJ |
2998 | /* If the va_list type is a simple pointer and nothing special is needed, | |
2999 | optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0), | |
3000 | fold __builtin_va_end (&ap) away as a NOP, and turn __builtin_va_copy | |
3001 | into a simple pointer assignment. */ | |
3002 | ||
3003 | static tree | |
726a989a | 3004 | optimize_stdarg_builtin (gimple call) |
d7bd8aeb | 3005 | { |
35cbb299 | 3006 | tree callee, lhs, rhs, cfun_va_list; |
d7bd8aeb JJ |
3007 | bool va_list_simple_ptr; |
3008 | ||
726a989a | 3009 | if (gimple_code (call) != GIMPLE_CALL) |
d7bd8aeb JJ |
3010 | return NULL_TREE; |
3011 | ||
726a989a | 3012 | callee = gimple_call_fndecl (call); |
35cbb299 KT |
3013 | |
3014 | cfun_va_list = targetm.fn_abi_va_list (callee); | |
3015 | va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list) | |
3016 | && (TREE_TYPE (cfun_va_list) == void_type_node | |
3017 | || TREE_TYPE (cfun_va_list) == char_type_node); | |
3018 | ||
d7bd8aeb JJ |
3019 | switch (DECL_FUNCTION_CODE (callee)) |
3020 | { | |
3021 | case BUILT_IN_VA_START: | |
3022 | if (!va_list_simple_ptr | |
3023 | || targetm.expand_builtin_va_start != NULL | |
726a989a | 3024 | || built_in_decls[BUILT_IN_NEXT_ARG] == NULL) |
d7bd8aeb JJ |
3025 | return NULL_TREE; |
3026 | ||
726a989a | 3027 | if (gimple_call_num_args (call) != 2) |
d7bd8aeb JJ |
3028 | return NULL_TREE; |
3029 | ||
726a989a | 3030 | lhs = gimple_call_arg (call, 0); |
d7bd8aeb JJ |
3031 | if (!POINTER_TYPE_P (TREE_TYPE (lhs)) |
3032 | || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs))) | |
35cbb299 | 3033 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
d7bd8aeb | 3034 | return NULL_TREE; |
726a989a | 3035 | |
d7bd8aeb JJ |
3036 | lhs = build_fold_indirect_ref (lhs); |
3037 | rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG], | |
726a989a | 3038 | 1, integer_zero_node); |
d7bd8aeb JJ |
3039 | rhs = fold_convert (TREE_TYPE (lhs), rhs); |
3040 | return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs); | |
3041 | ||
3042 | case BUILT_IN_VA_COPY: | |
3043 | if (!va_list_simple_ptr) | |
3044 | return NULL_TREE; | |
3045 | ||
726a989a | 3046 | if (gimple_call_num_args (call) != 2) |
d7bd8aeb JJ |
3047 | return NULL_TREE; |
3048 | ||
726a989a | 3049 | lhs = gimple_call_arg (call, 0); |
d7bd8aeb JJ |
3050 | if (!POINTER_TYPE_P (TREE_TYPE (lhs)) |
3051 | || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs))) | |
35cbb299 | 3052 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
d7bd8aeb JJ |
3053 | return NULL_TREE; |
3054 | ||
3055 | lhs = build_fold_indirect_ref (lhs); | |
726a989a | 3056 | rhs = gimple_call_arg (call, 1); |
d7bd8aeb | 3057 | if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs)) |
35cbb299 | 3058 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
d7bd8aeb JJ |
3059 | return NULL_TREE; |
3060 | ||
3061 | rhs = fold_convert (TREE_TYPE (lhs), rhs); | |
3062 | return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs); | |
3063 | ||
3064 | case BUILT_IN_VA_END: | |
726a989a | 3065 | /* No effect, so the statement will be deleted. */ |
d7bd8aeb JJ |
3066 | return integer_zero_node; |
3067 | ||
3068 | default: | |
3069 | gcc_unreachable (); | |
3070 | } | |
3071 | } | |
726a989a | 3072 | |
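/* Source-level illustration (hypothetical example): on a target whose
   va_list is a plain char * or void * pointer, the stdarg builtins in
   a function like this one are rewritten as the comments indicate.  */

#include <stdarg.h>

int
sum (int count, ...)
{
  va_list ap;
  int i, total = 0;

  va_start (ap, count);   /* becomes: ap = __builtin_next_arg (0);   */
  for (i = 0; i < count; i++)
    total += va_arg (ap, int);
  va_end (ap);            /* folded to a no-op (integer_zero_node).  */

  return total;
}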
b28b1600 JJ |
3073 | /* Convert EXPR into a GIMPLE value suitable for substitution on the |
3074 | RHS of an assignment. Insert the necessary statements before | |
726a989a RB |
3075 | iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL, | |
3076 | is replaced. If the call is expected to produce a result, then it | |
3077 | is replaced by an assignment of the new RHS to the result variable. | |
3078 | If the result is to be ignored, then the call is replaced by a | |
3079 | GIMPLE_NOP. */ | |
b28b1600 | 3080 | |
726a989a RB |
3081 | static void |
3082 | gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr) | |
b28b1600 | 3083 | { |
726a989a RB |
3084 | tree lhs; |
3085 | tree tmp = NULL_TREE; /* Silence warning. */ | |
3086 | gimple stmt, new_stmt; | |
3087 | gimple_stmt_iterator i; | |
3088 | gimple_seq stmts = gimple_seq_alloc (); | |
d406b663 | 3089 | struct gimplify_ctx gctx; |
b28b1600 | 3090 | |
726a989a RB |
3091 | stmt = gsi_stmt (*si_p); |
3092 | ||
3093 | gcc_assert (is_gimple_call (stmt)); | |
3094 | ||
3095 | lhs = gimple_call_lhs (stmt); | |
3096 | ||
d406b663 | 3097 | push_gimplify_context (&gctx); |
726a989a RB |
3098 | |
3099 | if (lhs == NULL_TREE) | |
3100 | gimplify_and_add (expr, &stmts); | |
3101 | else | |
2e929cf3 | 3102 | tmp = get_initialized_tmp_var (expr, &stmts, NULL); |
726a989a | 3103 | |
b28b1600 JJ |
3104 | pop_gimplify_context (NULL); |
3105 | ||
726a989a RB |
3106 | if (gimple_has_location (stmt)) |
3107 | annotate_all_with_location (stmts, gimple_location (stmt)); | |
f47c96aa | 3108 | |
b28b1600 | 3109 | /* The replacement can expose previously unreferenced variables. */ |
726a989a RB |
3110 | for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i)) |
3111 | { | |
3112 | new_stmt = gsi_stmt (i); | |
3113 | find_new_referenced_vars (new_stmt); | |
3114 | gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT); | |
3115 | mark_symbols_for_renaming (new_stmt); | |
3116 | gsi_next (si_p); | |
3117 | } | |
3118 | ||
3119 | if (lhs == NULL_TREE) | |
3120 | new_stmt = gimple_build_nop (); | |
3121 | else | |
b28b1600 | 3122 | { |
726a989a RB |
3123 | new_stmt = gimple_build_assign (lhs, tmp); |
3124 | copy_virtual_operands (new_stmt, stmt); | |
3125 | move_ssa_defining_stmt_for_defs (new_stmt, stmt); | |
b28b1600 JJ |
3126 | } |
3127 | ||
726a989a RB |
3128 | gimple_set_location (new_stmt, gimple_location (stmt)); |
3129 | gsi_replace (si_p, new_stmt, false); | |
b28b1600 JJ |
3130 | } |
3131 | ||
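/* Usage sketch, mirroring the call site in execute_fold_all_builtins
   below: update_call_from_tree handles a GIMPLE-ready replacement
   directly, and only when the folded tree still needs gimplification
   does the heavyweight helper above take over:

     if (!update_call_from_tree (&i, result))
       {
	 gimplify_and_update_call_from_tree (&i, result);
	 todoflags |= TODO_rebuild_alias;
       }
*/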
6de9cd9a DN |
3132 | /* A simple pass that attempts to fold all builtin functions. This pass |
3133 | is run after we've propagated as many constants as we can. */ | |
3134 | ||
c2924966 | 3135 | static unsigned int |
6de9cd9a DN |
3136 | execute_fold_all_builtins (void) |
3137 | { | |
a7d6ba24 | 3138 | bool cfg_changed = false; |
6de9cd9a | 3139 | basic_block bb; |
7b0e48fb DB |
3140 | unsigned int todoflags = 0; |
3141 | ||
6de9cd9a DN |
3142 | FOR_EACH_BB (bb) |
3143 | { | |
726a989a RB |
3144 | gimple_stmt_iterator i; |
3145 | for (i = gsi_start_bb (bb); !gsi_end_p (i); ) | |
6de9cd9a | 3146 | { |
726a989a | 3147 | gimple stmt, old_stmt; |
6de9cd9a | 3148 | tree callee, result; |
10a0d495 | 3149 | enum built_in_function fcode; |
6de9cd9a | 3150 | |
726a989a RB |
3151 | stmt = gsi_stmt (i); |
3152 | ||
3153 | if (gimple_code (stmt) != GIMPLE_CALL) | |
10a0d495 | 3154 | { |
726a989a | 3155 | gsi_next (&i); |
10a0d495 JJ |
3156 | continue; |
3157 | } | |
726a989a | 3158 | callee = gimple_call_fndecl (stmt); |
6de9cd9a | 3159 | if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL) |
10a0d495 | 3160 | { |
726a989a | 3161 | gsi_next (&i); |
10a0d495 JJ |
3162 | continue; |
3163 | } | |
3164 | fcode = DECL_FUNCTION_CODE (callee); | |
6de9cd9a | 3165 | |
726a989a | 3166 | result = ccp_fold_builtin (stmt); |
53a8f709 UB |
3167 | |
3168 | if (result) | |
726a989a | 3169 | gimple_remove_stmt_histograms (cfun, stmt); |
53a8f709 | 3170 | |
6de9cd9a DN |
3171 | if (!result) |
3172 | switch (DECL_FUNCTION_CODE (callee)) | |
3173 | { | |
3174 | case BUILT_IN_CONSTANT_P: | |
3175 | /* Resolve __builtin_constant_p. If it hasn't been | |
3176 | folded to integer_one_node by now, it's fairly | |
3177 | certain that the value simply isn't constant. */ | |
726a989a | 3178 | result = integer_zero_node; |
6de9cd9a DN |
3179 | break; |
3180 | ||
cb8e078d | 3181 | case BUILT_IN_STACK_RESTORE: |
726a989a | 3182 | result = optimize_stack_restore (i); |
d7bd8aeb JJ |
3183 | if (result) |
3184 | break; | |
726a989a | 3185 | gsi_next (&i); |
d7bd8aeb JJ |
3186 | continue; |
3187 | ||
3188 | case BUILT_IN_VA_START: | |
3189 | case BUILT_IN_VA_END: | |
3190 | case BUILT_IN_VA_COPY: | |
3191 | /* These shouldn't be folded before pass_stdarg. */ | |
726a989a | 3192 | result = optimize_stdarg_builtin (stmt); |
cb8e078d JJ |
3193 | if (result) |
3194 | break; | |
3195 | /* FALLTHRU */ | |
3196 | ||
6de9cd9a | 3197 | default: |
726a989a | 3198 | gsi_next (&i); |
6de9cd9a DN |
3199 | continue; |
3200 | } | |
3201 | ||
3202 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3203 | { | |
3204 | fprintf (dump_file, "Simplified\n "); | |
726a989a | 3205 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
6de9cd9a DN |
3206 | } |
3207 | ||
726a989a RB |
3208 | old_stmt = stmt; |
3209 | push_stmt_changes (gsi_stmt_ptr (&i)); | |
cfaab3a9 | 3210 | |
726a989a RB |
3211 | if (!update_call_from_tree (&i, result)) |
3212 | { | |
3213 | gimplify_and_update_call_from_tree (&i, result); | |
3214 | todoflags |= TODO_rebuild_alias; | |
3215 | } | |
cfaab3a9 | 3216 | |
726a989a RB |
3217 | stmt = gsi_stmt (i); |
3218 | pop_stmt_changes (gsi_stmt_ptr (&i)); | |
cfaab3a9 | 3219 | |
726a989a RB |
3220 | if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt) |
3221 | && gimple_purge_dead_eh_edges (bb)) | |
a7d6ba24 | 3222 | cfg_changed = true; |
6de9cd9a DN |
3223 | |
3224 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3225 | { | |
3226 | fprintf (dump_file, "to\n "); | |
726a989a | 3227 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
6de9cd9a DN |
3228 | fprintf (dump_file, "\n"); |
3229 | } | |
10a0d495 JJ |
3230 | |
3231 | /* Retry the same statement if it changed into another | |
3232 | builtin; there might be new opportunities now. */ | |
726a989a | 3233 | if (gimple_code (stmt) != GIMPLE_CALL) |
10a0d495 | 3234 | { |
726a989a | 3235 | gsi_next (&i); |
10a0d495 JJ |
3236 | continue; |
3237 | } | |
726a989a | 3238 | callee = gimple_call_fndecl (stmt); |
10a0d495 | 3239 | if (!callee |
726a989a | 3240 | || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL |
10a0d495 | 3241 | || DECL_FUNCTION_CODE (callee) == fcode) |
726a989a | 3242 | gsi_next (&i); |
6de9cd9a DN |
3243 | } |
3244 | } | |
7b0e48fb | 3245 | |
a7d6ba24 | 3246 | /* Delete unreachable blocks. */ |
7b0e48fb DB |
3247 | if (cfg_changed) |
3248 | todoflags |= TODO_cleanup_cfg; | |
3249 | ||
3250 | return todoflags; | |
6de9cd9a DN |
3251 | } |
3252 | ||
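/* Illustrative source-level effect of the "fab" pass (hypothetical
   example, not from this file): any __builtin_constant_p that earlier
   constant propagation could not resolve to 1 is folded to 0 here, so
   the guarded branch disappears.  */

extern int g (int);

int
f (int n)
{
  if (__builtin_constant_p (n))   /* resolved to 0 by this pass */
    return n * 2;
  return g (n);
}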
750628d8 | 3253 | |
8ddbbcae | 3254 | struct gimple_opt_pass pass_fold_builtins = |
6de9cd9a | 3255 | { |
8ddbbcae JH |
3256 | { |
3257 | GIMPLE_PASS, | |
6de9cd9a DN |
3258 | "fab", /* name */ |
3259 | NULL, /* gate */ | |
3260 | execute_fold_all_builtins, /* execute */ | |
3261 | NULL, /* sub */ | |
3262 | NULL, /* next */ | |
3263 | 0, /* static_pass_number */ | |
3264 | 0, /* tv_id */ | |
7faade0f | 3265 | PROP_cfg | PROP_ssa, /* properties_required */ |
6de9cd9a DN |
3266 | 0, /* properties_provided */ |
3267 | 0, /* properties_destroyed */ | |
3268 | 0, /* todo_flags_start */ | |
b28b1600 JJ |
3269 | TODO_dump_func |
3270 | | TODO_verify_ssa | |
8ddbbcae JH |
3271 | | TODO_update_ssa /* todo_flags_finish */ |
3272 | } | |
6de9cd9a | 3273 | }; |