Commit | Line | Data |
---|---|---|
4ee9c684 | 1 | /* Conditional constant propagation pass for the GNU compiler. |
add6ee5e | 2 | Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 |
000657b5 | 3 | Free Software Foundation, Inc. |
4ee9c684 | 4 | Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org> |
5 | Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com> | |
6 | ||
7 | This file is part of GCC. | |
8 | ||
9 | GCC is free software; you can redistribute it and/or modify it | |
10 | under the terms of the GNU General Public License as published by the | |
8c4c00c1 | 11 | Free Software Foundation; either version 3, or (at your option) any |
4ee9c684 | 12 | later version. |
13 | ||
14 | GCC is distributed in the hope that it will be useful, but WITHOUT | |
15 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
16 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
17 | for more details. | |
18 | ||
19 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 20 | along with GCC; see the file COPYING3. If not see |
21 | <http://www.gnu.org/licenses/>. */ | |
4ee9c684 | 22 | |
88dbf20f | 23 | /* Conditional constant propagation (CCP) is based on the SSA |
24 | propagation engine (tree-ssa-propagate.c). Constant assignments of | |
25 | the form VAR = CST are propagated from the assignments into uses of | |
26 | VAR, which in turn may generate new constants. The simulation uses | |
27 | a four-level lattice to keep track of constant values associated | |
28 | with SSA names. Given an SSA name V_i, it may take one of the | |
29 | following values: | |
30 | ||
bfa30570 | 31 | UNINITIALIZED -> the initial state of the value. This value |
32 | is replaced with a correct initial value | |
33 | the first time the value is used, so the | |
34 | rest of the pass does not need to care about | |
35 | it. Using this value simplifies initialization | |
36 | of the pass, and prevents us from needlessly | |
37 | scanning statements that are never reached. | |
88dbf20f | 38 | |
39 | UNDEFINED -> V_i is a local variable whose definition | |
40 | has not been processed yet. Therefore we | |
41 | don't yet know if its value is a constant | |
42 | or not. | |
43 | ||
44 | CONSTANT -> V_i has been found to hold a constant | |
45 | value C. | |
46 | ||
47 | VARYING -> V_i cannot take a constant value, or if it | |
48 | does, it is not possible to determine it | |
49 | at compile time. | |
50 | ||
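   As a small illustration (the SSA names and the call below are made
   up for exposition), in the fragment

     x_1 = 4;
     y_2 = x_1 + 1;
     z_3 = foo ();

   simulation leaves x_1 and y_2 with the lattice value CONSTANT
   (4 and 5, respectively), while z_3, whose value cannot be known
   at compile time, ends up VARYING.
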
51 | The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node: | |
52 | ||
53 | 1- In ccp_visit_stmt, we are interested in assignments whose RHS | |
54 | evaluates into a constant and conditional jumps whose predicate | |
55 | evaluates into a boolean true or false. When an assignment of | |
56 | the form V_i = CONST is found, V_i's lattice value is set to | |
57 | CONSTANT and CONST is associated with it. This causes the | |
58 | propagation engine to add all the SSA edges coming out of the | |
59 | assignment into the worklists, so that statements that use V_i | |
60 | can be visited. | |
61 | ||
62 | If the statement is a conditional with a constant predicate, we | |
63 | mark the outgoing edges as executable or not executable | |
64 | depending on the predicate's value. This is then used when | |
65 | visiting PHI nodes to know when a PHI argument can be ignored. | |
66 | ||
67 | ||
68 | 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the | |
69 | same constant C, then the LHS of the PHI is set to C. This | |
70 | evaluation is known as the "meet operation". Since one of the | |
71 | goals of this evaluation is to optimistically return constant | |
72 | values as often as possible, it uses two main shortcuts: | |
73 | ||
74 | - If an argument is flowing in through a non-executable edge, it | |
75 | is ignored. This is useful in cases like this: | |
76 | ||
77 | if (PRED) | |
78 | a_9 = 3; | |
79 | else | |
80 | a_10 = 100; | |
81 | a_11 = PHI (a_9, a_10) | |
82 | ||
83 | If PRED is known to always evaluate to false, then we can | |
84 | assume that a_11 will always take its value from a_10, meaning | |
85 | that instead of considering it VARYING (a_9 and a_10 have | |
86 | different values), we can consider it CONSTANT 100. | |
87 | ||
88 | - If an argument has an UNDEFINED value, then it does not affect | |
89 | the outcome of the meet operation. If a variable V_i has an | |
90 | UNDEFINED value, it means that either its defining statement | |
91 | hasn't been visited yet or V_i has no defining statement, in | |
92 | which case the original symbol 'V' is being used | |
93 | uninitialized. Since 'V' is a local variable, the compiler | |
94 | may assume any initial value for it. | |
95 | ||
96 | ||
97 | After propagation, every variable V_i that ends up with a lattice | |
98 | value of CONSTANT will have the associated constant value in the | |
99 | array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for | |
100 | final substitution and folding. | |
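
   For example, if propagation has determined that x_1 is CONSTANT 4,
   a later use such as

     y_2 = x_1 + 1;

   is rewritten by substitute_and_fold into

     y_2 = 5;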
101 | ||
102 | ||
103 | Constant propagation in stores and loads (STORE-CCP) | |
104 | ---------------------------------------------------- | |
105 | ||
106 | While CCP has all the logic to propagate constants in GIMPLE | |
107 | registers, it is missing the ability to associate constants with | |
108 | stores and loads (i.e., pointer dereferences, structures and | |
109 | global/aliased variables). We don't keep loads and stores in | |
110 | SSA, but we do build a factored use-def web for them (in the | |
111 | virtual operands). | |
112 | ||
113 | For instance, consider the following code fragment: | |
114 | ||
115 | struct A a; | |
116 | const int B = 42; | |
117 | ||
118 | void foo (int i) | |
119 | { | |
120 | if (i > 10) | |
121 | a.a = 42; | |
122 | else | |
123 | { | |
124 | a.b = 21; | |
125 | a.a = a.b + 21; | |
126 | } | |
127 | ||
128 | if (a.a != B) | |
129 | never_executed (); | |
130 | } | |
131 | ||
132 | We should be able to deduce that the predicate 'a.a != B' is always | |
133 | false. To achieve this, we associate constant values to the SSA | |
4fb5e5ca | 134 | names in the VDEF operands for each store. Additionally, |
135 | since we also glob partial loads/stores with the base symbol, we | |
136 | also keep track of the memory reference where the constant value | |
137 | was stored (in the MEM_REF field of PROP_VALUE_T). For instance, | |
88dbf20f | 138 | |
4fb5e5ca | 139 | # a_5 = VDEF <a_4> |
88dbf20f | 140 | a.a = 2; |
141 | ||
142 | # VUSE <a_5> | |
143 | x_3 = a.b; | |
144 | ||
145 | In the example above, CCP will associate value '2' with 'a_5', but | |
146 | it would be wrong to replace the load from 'a.b' with '2', because | |
147 | '2' had been stored into a.a. | |
148 | ||
bfa30570 | 149 | Note that the initial value of virtual operands is VARYING, not |
150 | UNDEFINED. Consider, for instance, global variables: | |
88dbf20f | 151 | |
152 | int A; | |
153 | ||
154 | foo (int i) | |
155 | { | |
156 | if (i_3 > 10) | |
157 | A_4 = 3; | |
158 | # A_5 = PHI (A_4, A_2); | |
159 | ||
160 | # VUSE <A_5> | |
161 | A.0_6 = A; | |
162 | ||
163 | return A.0_6; | |
164 | } | |
165 | ||
166 | The value of A_2 cannot be assumed to be UNDEFINED, as it may have | |
167 | been defined outside of foo. If we were to assume it UNDEFINED, we | |
bfa30570 | 168 | would erroneously optimize the above into 'return 3;'. |
88dbf20f | 169 | |
170 | Though STORE-CCP is not too expensive, it does have to do more work | |
171 | than regular CCP, so it is only enabled at -O2. Both regular CCP | |
172 | and STORE-CCP use the exact same algorithm. The only distinction | |
173 | is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is | |
174 | set to true. This affects the evaluation of statements and PHI | |
175 | nodes. | |
4ee9c684 | 176 | |
177 | References: | |
178 | ||
179 | Constant propagation with conditional branches, | |
180 | Wegman and Zadeck, ACM TOPLAS 13(2):181-210. | |
181 | ||
182 | Building an Optimizing Compiler, | |
183 | Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9. | |
184 | ||
185 | Advanced Compiler Design and Implementation, | |
186 | Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */ | |
187 | ||
188 | #include "config.h" | |
189 | #include "system.h" | |
190 | #include "coretypes.h" | |
191 | #include "tm.h" | |
4ee9c684 | 192 | #include "tree.h" |
41511585 | 193 | #include "flags.h" |
4ee9c684 | 194 | #include "rtl.h" |
195 | #include "tm_p.h" | |
41511585 | 196 | #include "ggc.h" |
4ee9c684 | 197 | #include "basic-block.h" |
41511585 | 198 | #include "output.h" |
41511585 | 199 | #include "expr.h" |
200 | #include "function.h" | |
4ee9c684 | 201 | #include "diagnostic.h" |
41511585 | 202 | #include "timevar.h" |
4ee9c684 | 203 | #include "tree-dump.h" |
41511585 | 204 | #include "tree-flow.h" |
4ee9c684 | 205 | #include "tree-pass.h" |
41511585 | 206 | #include "tree-ssa-propagate.h" |
207 | #include "langhooks.h" | |
8782adcf | 208 | #include "target.h" |
add6ee5e | 209 | #include "toplev.h" |
4ee9c684 | 210 | |
211 | ||
212 | /* Possible lattice values. */ | |
213 | typedef enum | |
214 | { | |
bfa30570 | 215 | UNINITIALIZED, |
4ee9c684 | 216 | UNDEFINED, |
217 | CONSTANT, | |
218 | VARYING | |
88dbf20f | 219 | } ccp_lattice_t; |
4ee9c684 | 220 | |
88dbf20f | 221 | /* Array of propagated constant values. After propagation, |
222 | CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If | |
223 | the constant is held in an SSA name representing a memory store | |
4fb5e5ca | 224 | (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual |
225 | memory reference used to store (i.e., the LHS of the assignment | |
226 | doing the store). */ | |
20140406 | 227 | static prop_value_t *const_val; |
4ee9c684 | 228 | |
88dbf20f | 229 | /* True if we are also propagating constants in stores and loads. */ |
230 | static bool do_store_ccp; | |
4ee9c684 | 231 | |
88dbf20f | 232 | /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */ |
01406fc0 | 233 | |
234 | static void | |
88dbf20f | 235 | dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val) |
01406fc0 | 236 | { |
41511585 | 237 | switch (val.lattice_val) |
01406fc0 | 238 | { |
88dbf20f | 239 | case UNINITIALIZED: |
240 | fprintf (outf, "%sUNINITIALIZED", prefix); | |
241 | break; | |
41511585 | 242 | case UNDEFINED: |
243 | fprintf (outf, "%sUNDEFINED", prefix); | |
244 | break; | |
245 | case VARYING: | |
246 | fprintf (outf, "%sVARYING", prefix); | |
247 | break; | |
41511585 | 248 | case CONSTANT: |
249 | fprintf (outf, "%sCONSTANT ", prefix); | |
88dbf20f | 250 | print_generic_expr (outf, val.value, dump_flags); |
41511585 | 251 | break; |
252 | default: | |
8c0963c4 | 253 | gcc_unreachable (); |
41511585 | 254 | } |
01406fc0 | 255 | } |
4ee9c684 | 256 | |
4ee9c684 | 257 | |
88dbf20f | 258 | /* Print lattice value VAL to stderr. */ |
259 | ||
260 | void debug_lattice_value (prop_value_t val); | |
261 | ||
262 | void | |
263 | debug_lattice_value (prop_value_t val) | |
264 | { | |
265 | dump_lattice_value (stderr, "", val); | |
266 | fprintf (stderr, "\n"); | |
267 | } | |
4ee9c684 | 268 | |
4ee9c684 | 269 | |
d03bd588 | 270 | /* The regular is_gimple_min_invariant does a shallow test of the object. |
271 | It assumes that full gimplification has happened, or will happen on the | |
272 | object. For a value coming from DECL_INITIAL, this is not true, so we | |
191ec5a2 | 273 | have to be more strict ourselves. */ |
d03bd588 | 274 | |
275 | static bool | |
276 | ccp_decl_initial_min_invariant (tree t) | |
277 | { | |
278 | if (!is_gimple_min_invariant (t)) | |
279 | return false; | |
280 | if (TREE_CODE (t) == ADDR_EXPR) | |
281 | { | |
282 | /* Inline and unroll is_gimple_addressable. */ | |
283 | while (1) | |
284 | { | |
285 | t = TREE_OPERAND (t, 0); | |
286 | if (is_gimple_id (t)) | |
287 | return true; | |
288 | if (!handled_component_p (t)) | |
289 | return false; | |
290 | } | |
291 | } | |
292 | return true; | |
293 | } | |
294 | ||
aecfc21d | 295 | /* If SYM is a constant variable with known value, return the value. |
296 | NULL_TREE is returned otherwise. */ | |
297 | ||
298 | static tree | |
299 | get_symbol_constant_value (tree sym) | |
300 | { | |
301 | if (TREE_STATIC (sym) | |
302 | && TREE_READONLY (sym) | |
303 | && !MTAG_P (sym)) | |
304 | { | |
305 | tree val = DECL_INITIAL (sym); | |
306 | if (val | |
307 | && ccp_decl_initial_min_invariant (val)) | |
308 | return val; | |
309 | } | |
310 | ||
311 | return NULL_TREE; | |
312 | } | |
d03bd588 | 313 | |
88dbf20f | 314 | /* Compute a default value for variable VAR and store it in the |
315 | CONST_VAL array. The following rules are used to get default | |
316 | values: | |
01406fc0 | 317 | |
88dbf20f | 318 | 1- Global and static variables that are declared constant are |
319 | considered CONSTANT. | |
320 | ||
321 | 2- Any other value is considered UNDEFINED. This is useful when | |
41511585 | 322 | considering PHI nodes. PHI arguments that are undefined do not |
323 | change the constant value of the PHI node, which allows for more | |
88dbf20f | 324 | constants to be propagated. |
4ee9c684 | 325 | |
88dbf20f | 326 | 3- If SSA_NAME_VALUE is set and it is a constant, its value is |
327 | used. | |
4ee9c684 | 328 | |
88dbf20f | 329 | 4- Variables defined by statements other than assignments and PHI |
330 | nodes are considered VARYING. | |
4ee9c684 | 331 | |
bfa30570 | 332 | 5- Initial values of variables that are not GIMPLE registers are |
333 | considered VARYING. */ | |
4ee9c684 | 334 | |
88dbf20f | 335 | static prop_value_t |
336 | get_default_value (tree var) | |
337 | { | |
338 | tree sym = SSA_NAME_VAR (var); | |
339 | prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE }; | |
aecfc21d | 340 | tree cst_val; |
bfa30570 | 341 | |
88dbf20f | 342 | if (!do_store_ccp && !is_gimple_reg (var)) |
4ee9c684 | 343 | { |
88dbf20f | 344 | /* Short circuit for regular CCP. We are not interested in any |
345 | non-register when DO_STORE_CCP is false. */ | |
41511585 | 346 | val.lattice_val = VARYING; |
4ee9c684 | 347 | } |
88dbf20f | 348 | else if (SSA_NAME_VALUE (var) |
349 | && is_gimple_min_invariant (SSA_NAME_VALUE (var))) | |
41511585 | 350 | { |
88dbf20f | 351 | val.lattice_val = CONSTANT; |
352 | val.value = SSA_NAME_VALUE (var); | |
41511585 | 353 | } |
aecfc21d | 354 | else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE) |
41511585 | 355 | { |
88dbf20f | 356 | /* Globals and static variables declared 'const' take their |
357 | initial value. */ | |
358 | val.lattice_val = CONSTANT; | |
aecfc21d | 359 | val.value = cst_val; |
88dbf20f | 360 | val.mem_ref = sym; |
41511585 | 361 | } |
362 | else | |
363 | { | |
41511585 | 364 | tree stmt = SSA_NAME_DEF_STMT (var); |
4ee9c684 | 365 | |
88dbf20f | 366 | if (IS_EMPTY_STMT (stmt)) |
367 | { | |
368 | /* Variables defined by an empty statement are those used | |
369 | before being initialized. If VAR is a local variable, we | |
bfa30570 | 370 | can assume initially that it is UNDEFINED, otherwise we must |
371 | consider it VARYING. */ | |
88dbf20f | 372 | if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL) |
373 | val.lattice_val = UNDEFINED; | |
88dbf20f | 374 | else |
41511585 | 375 | val.lattice_val = VARYING; |
376 | } | |
35cc02b5 | 377 | else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT |
88dbf20f | 378 | || TREE_CODE (stmt) == PHI_NODE) |
379 | { | |
380 | /* Any other variable defined by an assignment or a PHI node | |
bfa30570 | 381 | is considered UNDEFINED. */ |
382 | val.lattice_val = UNDEFINED; | |
88dbf20f | 383 | } |
384 | else | |
385 | { | |
386 | /* Otherwise, VAR will never take on a constant value. */ | |
387 | val.lattice_val = VARYING; | |
388 | } | |
41511585 | 389 | } |
4ee9c684 | 390 | |
41511585 | 391 | return val; |
392 | } | |
4ee9c684 | 393 | |
4ee9c684 | 394 | |
bfa30570 | 395 | /* Get the constant value associated with variable VAR. */ |
4ee9c684 | 396 | |
bfa30570 | 397 | static inline prop_value_t * |
398 | get_value (tree var) | |
88dbf20f | 399 | { |
400 | prop_value_t *val = &const_val[SSA_NAME_VERSION (var)]; | |
bfa30570 | 401 | |
402 | if (val->lattice_val == UNINITIALIZED) | |
4ee9c684 | 403 | *val = get_default_value (var); |
404 | ||
405 | return val; | |
406 | } | |
407 | ||
bfa30570 | 408 | /* Sets the value associated with VAR to VARYING. */ |
409 | ||
410 | static inline void | |
411 | set_value_varying (tree var) | |
412 | { | |
413 | prop_value_t *val = &const_val[SSA_NAME_VERSION (var)]; | |
414 | ||
415 | val->lattice_val = VARYING; | |
416 | val->value = NULL_TREE; | |
417 | val->mem_ref = NULL_TREE; | |
418 | } | |
4ee9c684 | 419 | |
b31eb493 | 420 | /* For float types, modify the value of VAL to make ccp work correctly |
421 | for non-standard values (-0, NaN): | |
422 | ||
423 | If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0. | |
424 | If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED. | |
425 | This is to fix the following problem (see PR 29921): Suppose we have | |
426 | ||
427 | x = 0.0 * y | |
428 | ||
429 | and we set value of y to NaN. This causes value of x to be set to NaN. | |
430 | When we later determine that y is in fact VARYING, fold uses the fact | |
431 | that HONOR_NANS is false, and we try to change the value of x to 0, | |
432 | causing an ICE. With HONOR_NANS being false, the real appearance of | |
433 | NaN would cause undefined behavior, though, so claiming that y (and x) | |
434 | are UNDEFINED initially is correct. */ | |
435 | ||
436 | static void | |
437 | canonicalize_float_value (prop_value_t *val) | |
438 | { | |
439 | enum machine_mode mode; | |
440 | tree type; | |
441 | REAL_VALUE_TYPE d; | |
442 | ||
443 | if (val->lattice_val != CONSTANT | |
444 | || TREE_CODE (val->value) != REAL_CST) | |
445 | return; | |
446 | ||
447 | d = TREE_REAL_CST (val->value); | |
448 | type = TREE_TYPE (val->value); | |
449 | mode = TYPE_MODE (type); | |
450 | ||
451 | if (!HONOR_SIGNED_ZEROS (mode) | |
452 | && REAL_VALUE_MINUS_ZERO (d)) | |
453 | { | |
454 | val->value = build_real (type, dconst0); | |
455 | return; | |
456 | } | |
457 | ||
458 | if (!HONOR_NANS (mode) | |
459 | && REAL_VALUE_ISNAN (d)) | |
460 | { | |
461 | val->lattice_val = UNDEFINED; | |
462 | val->value = NULL; | |
463 | val->mem_ref = NULL; | |
464 | return; | |
465 | } | |
466 | } | |
467 | ||
88dbf20f | 468 | /* Set the value for variable VAR to NEW_VAL. Return true if the new |
469 | value is different from VAR's previous value. */ | |
4ee9c684 | 470 | |
41511585 | 471 | static bool |
88dbf20f | 472 | set_lattice_value (tree var, prop_value_t new_val) |
4ee9c684 | 473 | { |
bfa30570 | 474 | prop_value_t *old_val = get_value (var); |
88dbf20f | 475 | |
b31eb493 | 476 | canonicalize_float_value (&new_val); |
477 | ||
88dbf20f | 478 | /* Lattice transitions must always be monotonically increasing in |
bfa30570 | 479 | value. If *OLD_VAL and NEW_VAL are the same, return false to |
480 | inform the caller that this was a non-transition. */ | |
481 | ||
aecfc21d | 482 | gcc_assert (old_val->lattice_val < new_val.lattice_val |
88dbf20f | 483 | || (old_val->lattice_val == new_val.lattice_val |
aecfc21d | 484 | && ((!old_val->value && !new_val.value) |
485 | || operand_equal_p (old_val->value, new_val.value, 0)) | |
bfa30570 | 486 | && old_val->mem_ref == new_val.mem_ref)); |
88dbf20f | 487 | |
488 | if (old_val->lattice_val != new_val.lattice_val) | |
4ee9c684 | 489 | { |
41511585 | 490 | if (dump_file && (dump_flags & TDF_DETAILS)) |
491 | { | |
88dbf20f | 492 | dump_lattice_value (dump_file, "Lattice value changed to ", new_val); |
bfa30570 | 493 | fprintf (dump_file, ". Adding SSA edges to worklist.\n"); |
41511585 | 494 | } |
495 | ||
88dbf20f | 496 | *old_val = new_val; |
497 | ||
bfa30570 | 498 | gcc_assert (new_val.lattice_val != UNDEFINED); |
499 | return true; | |
4ee9c684 | 500 | } |
41511585 | 501 | |
502 | return false; | |
4ee9c684 | 503 | } |
504 | ||
505 | ||
88dbf20f | 506 | /* Return the likely CCP lattice value for STMT. |
4ee9c684 | 507 | |
41511585 | 508 | If STMT has no operands, then return CONSTANT. |
4ee9c684 | 509 | |
41511585 | 510 | Else if any operands of STMT are undefined, then return UNDEFINED. |
4ee9c684 | 511 | |
41511585 | 512 | Else if any operands of STMT are constants, then return CONSTANT. |
4ee9c684 | 513 | |
41511585 | 514 | Else return VARYING. */ |
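   /* For instance (operands purely illustrative): for x_3 = y_1 + z_2,
      the likely value is UNDEFINED as long as z_2 is still UNDEFINED;
      once no operand is UNDEFINED and at least one of y_1 and z_2 is
      CONSTANT, the likely value is CONSTANT.  */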
4ee9c684 | 515 | |
88dbf20f | 516 | static ccp_lattice_t |
41511585 | 517 | likely_value (tree stmt) |
518 | { | |
bfa30570 | 519 | bool has_constant_operand; |
41511585 | 520 | stmt_ann_t ann; |
521 | tree use; | |
522 | ssa_op_iter iter; | |
4ee9c684 | 523 | |
41511585 | 524 | ann = stmt_ann (stmt); |
88dbf20f | 525 | |
526 | /* If the statement has volatile operands, it won't fold to a | |
527 | constant value. */ | |
528 | if (ann->has_volatile_ops) | |
529 | return VARYING; | |
530 | ||
531 | /* If we are not doing store-ccp, statements with loads | |
532 | and/or stores will never fold into a constant. */ | |
533 | if (!do_store_ccp | |
9d637cc5 | 534 | && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)) |
41511585 | 535 | return VARYING; |
4ee9c684 | 536 | |
88dbf20f | 537 | |
538 | /* A CALL_EXPR is assumed to be varying. NOTE: This may be overly | |
539 | conservative, in the presence of const and pure calls. */ | |
41511585 | 540 | if (get_call_expr_in (stmt) != NULL_TREE) |
541 | return VARYING; | |
4ee9c684 | 542 | |
88dbf20f | 543 | /* Anything other than assignments and conditional jumps is not
544 | interesting for CCP. */ | |
35cc02b5 | 545 | if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT |
bfa30570 | 546 | && !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE) |
88dbf20f | 547 | && TREE_CODE (stmt) != COND_EXPR |
548 | && TREE_CODE (stmt) != SWITCH_EXPR) | |
549 | return VARYING; | |
550 | ||
b765fa12 | 551 | if (is_gimple_min_invariant (get_rhs (stmt))) |
552 | return CONSTANT; | |
553 | ||
bfa30570 | 554 | has_constant_operand = false; |
555 | FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE) | |
41511585 | 556 | { |
bfa30570 | 557 | prop_value_t *val = get_value (use); |
41511585 | 558 | |
bfa30570 | 559 | if (val->lattice_val == UNDEFINED) |
560 | return UNDEFINED; | |
88dbf20f | 561 | |
41511585 | 562 | if (val->lattice_val == CONSTANT) |
bfa30570 | 563 | has_constant_operand = true; |
4ee9c684 | 564 | } |
41511585 | 565 | |
bfa30570 | 566 | if (has_constant_operand |
567 | /* We do not consider virtual operands here -- load from read-only | |
568 | memory may have only VARYING virtual operands, but still be | |
569 | constant. */ | |
570 | || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE)) | |
88dbf20f | 571 | return CONSTANT; |
572 | ||
bfa30570 | 573 | return VARYING; |
4ee9c684 | 574 | } |
575 | ||
bfa30570 | 576 | /* Returns true if STMT cannot be constant. */ |
577 | ||
578 | static bool | |
579 | surely_varying_stmt_p (tree stmt) | |
580 | { | |
581 | /* If the statement has operands that we cannot handle, it cannot be | |
582 | constant. */ | |
583 | if (stmt_ann (stmt)->has_volatile_ops) | |
584 | return true; | |
585 | ||
586 | if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)) | |
587 | { | |
588 | if (!do_store_ccp) | |
589 | return true; | |
590 | ||
591 | /* We can only handle simple loads and stores. */ | |
592 | if (!stmt_makes_single_load (stmt) | |
593 | && !stmt_makes_single_store (stmt)) | |
594 | return true; | |
595 | } | |
596 | ||
597 | /* If it contains a call, it is varying. */ | |
598 | if (get_call_expr_in (stmt) != NULL_TREE) | |
599 | return true; | |
600 | ||
601 | /* Anything other than assignments and conditional jumps is not | |
602 | interesting for CCP. */ | |
35cc02b5 | 603 | if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT |
bfa30570 | 604 | && !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE) |
605 | && TREE_CODE (stmt) != COND_EXPR | |
606 | && TREE_CODE (stmt) != SWITCH_EXPR) | |
607 | return true; | |
608 | ||
609 | return false; | |
610 | } | |
4ee9c684 | 611 | |
41511585 | 612 | /* Initialize local data structures for CCP. */ |
4ee9c684 | 613 | |
614 | static void | |
41511585 | 615 | ccp_initialize (void) |
4ee9c684 | 616 | { |
41511585 | 617 | basic_block bb; |
4ee9c684 | 618 | |
43959b95 | 619 | const_val = XCNEWVEC (prop_value_t, num_ssa_names); |
4ee9c684 | 620 | |
41511585 | 621 | /* Initialize simulation flags for PHI nodes and statements. */ |
622 | FOR_EACH_BB (bb) | |
4ee9c684 | 623 | { |
41511585 | 624 | block_stmt_iterator i; |
4ee9c684 | 625 | |
41511585 | 626 | for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i)) |
627 | { | |
41511585 | 628 | tree stmt = bsi_stmt (i); |
bfa30570 | 629 | bool is_varying = surely_varying_stmt_p (stmt); |
4ee9c684 | 630 | |
bfa30570 | 631 | if (is_varying) |
41511585 | 632 | { |
88dbf20f | 633 | tree def; |
634 | ssa_op_iter iter; | |
635 | ||
636 | /* If the statement will not produce a constant, mark | |
637 | all its outputs VARYING. */ | |
638 | FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS) | |
bfa30570 | 639 | { |
640 | if (is_varying) | |
641 | set_value_varying (def); | |
642 | } | |
41511585 | 643 | } |
644 | ||
41511585 | 645 | DONT_SIMULATE_AGAIN (stmt) = is_varying; |
646 | } | |
4ee9c684 | 647 | } |
648 | ||
bfa30570 | 649 | /* Now process PHI nodes. We never set DONT_SIMULATE_AGAIN on PHI nodes,
650 | since we do not know which edges are executable yet, except for | |
651 | phi nodes for virtual operands when we do not do store ccp. */ | |
41511585 | 652 | FOR_EACH_BB (bb) |
4ee9c684 | 653 | { |
88dbf20f | 654 | tree phi; |
41511585 | 655 | |
656 | for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi)) | |
657 | { | |
bfa30570 | 658 | if (!do_store_ccp && !is_gimple_reg (PHI_RESULT (phi))) |
659 | DONT_SIMULATE_AGAIN (phi) = true; | |
660 | else | |
661 | DONT_SIMULATE_AGAIN (phi) = false; | |
41511585 | 662 | } |
4ee9c684 | 663 | } |
41511585 | 664 | } |
4ee9c684 | 665 | |
4ee9c684 | 666 | |
88dbf20f | 667 | /* Do final substitution of propagated values, clean up the flowgraph and
33a34f1e | 668 | free allocated storage. |
4ee9c684 | 669 | |
33a34f1e | 670 | Return TRUE when something was optimized. */ |
671 | ||
672 | static bool | |
88dbf20f | 673 | ccp_finalize (void) |
4ee9c684 | 674 | { |
88dbf20f | 675 | /* Perform substitutions based on the known constant values. */ |
33a34f1e | 676 | bool something_changed = substitute_and_fold (const_val, false); |
4ee9c684 | 677 | |
88dbf20f | 678 | free (const_val); |
33a34f1e | 679 | return something_changed; |
4ee9c684 | 680 | } |
681 | ||
682 | ||
88dbf20f | 683 | /* Compute the meet operator between *VAL1 and *VAL2. Store the result |
684 | in VAL1. | |
685 | ||
686 | any M UNDEFINED = any | |
88dbf20f | 687 | any M VARYING = VARYING |
688 | Ci M Cj = Ci if (i == j) | |
689 | Ci M Cj = VARYING if (i != j) | |
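
   For example, CONSTANT 3 M UNDEFINED = CONSTANT 3, while
   CONSTANT 3 M CONSTANT 4 = VARYING (the values 3 and 4 are purely
   illustrative).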
bfa30570 | 690 | */ |
4ee9c684 | 691 | |
692 | static void | |
88dbf20f | 693 | ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2) |
4ee9c684 | 694 | { |
88dbf20f | 695 | if (val1->lattice_val == UNDEFINED) |
4ee9c684 | 696 | { |
88dbf20f | 697 | /* UNDEFINED M any = any */ |
698 | *val1 = *val2; | |
41511585 | 699 | } |
88dbf20f | 700 | else if (val2->lattice_val == UNDEFINED) |
92481a4d | 701 | { |
88dbf20f | 702 | /* any M UNDEFINED = any |
703 | Nothing to do. VAL1 already contains the value we want. */ | |
704 | ; | |
92481a4d | 705 | } |
88dbf20f | 706 | else if (val1->lattice_val == VARYING |
707 | || val2->lattice_val == VARYING) | |
41511585 | 708 | { |
88dbf20f | 709 | /* any M VARYING = VARYING. */ |
710 | val1->lattice_val = VARYING; | |
711 | val1->value = NULL_TREE; | |
712 | val1->mem_ref = NULL_TREE; | |
41511585 | 713 | } |
88dbf20f | 714 | else if (val1->lattice_val == CONSTANT |
715 | && val2->lattice_val == CONSTANT | |
716 | && simple_cst_equal (val1->value, val2->value) == 1 | |
717 | && (!do_store_ccp | |
b765fa12 | 718 | || (val1->mem_ref && val2->mem_ref |
719 | && operand_equal_p (val1->mem_ref, val2->mem_ref, 0)))) | |
41511585 | 720 | { |
88dbf20f | 721 | /* Ci M Cj = Ci if (i == j) |
722 | Ci M Cj = VARYING if (i != j) | |
723 | ||
724 | If these two values come from memory stores, make sure that | |
725 | they come from the same memory reference. */ | |
726 | val1->lattice_val = CONSTANT; | |
727 | val1->value = val1->value; | |
728 | val1->mem_ref = val1->mem_ref; | |
41511585 | 729 | } |
730 | else | |
731 | { | |
88dbf20f | 732 | /* Any other combination is VARYING. */ |
733 | val1->lattice_val = VARYING; | |
734 | val1->value = NULL_TREE; | |
735 | val1->mem_ref = NULL_TREE; | |
41511585 | 736 | } |
4ee9c684 | 737 | } |
738 | ||
739 | ||
41511585 | 740 | /* Loop through the PHI_NODE's parameters for BLOCK and compare their |
741 | lattice values to determine PHI_NODE's lattice value. The value of a | |
88dbf20f | 742 | PHI node is determined by calling ccp_lattice_meet with all the arguments
41511585 | 743 | of the PHI node that are incoming via executable edges. */ |
4ee9c684 | 744 | |
41511585 | 745 | static enum ssa_prop_result |
746 | ccp_visit_phi_node (tree phi) | |
4ee9c684 | 747 | { |
41511585 | 748 | int i; |
88dbf20f | 749 | prop_value_t *old_val, new_val; |
4ee9c684 | 750 | |
41511585 | 751 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4ee9c684 | 752 | { |
41511585 | 753 | fprintf (dump_file, "\nVisiting PHI node: "); |
754 | print_generic_expr (dump_file, phi, dump_flags); | |
4ee9c684 | 755 | } |
4ee9c684 | 756 | |
bfa30570 | 757 | old_val = get_value (PHI_RESULT (phi)); |
41511585 | 758 | switch (old_val->lattice_val) |
759 | { | |
760 | case VARYING: | |
88dbf20f | 761 | return SSA_PROP_VARYING; |
4ee9c684 | 762 | |
41511585 | 763 | case CONSTANT: |
764 | new_val = *old_val; | |
765 | break; | |
4ee9c684 | 766 | |
41511585 | 767 | case UNDEFINED: |
41511585 | 768 | new_val.lattice_val = UNDEFINED; |
88dbf20f | 769 | new_val.value = NULL_TREE; |
770 | new_val.mem_ref = NULL_TREE; | |
41511585 | 771 | break; |
4ee9c684 | 772 | |
41511585 | 773 | default: |
8c0963c4 | 774 | gcc_unreachable (); |
41511585 | 775 | } |
4ee9c684 | 776 | |
41511585 | 777 | for (i = 0; i < PHI_NUM_ARGS (phi); i++) |
778 | { | |
88dbf20f | 779 | /* Compute the meet operator over all the PHI arguments flowing |
780 | through executable edges. */ | |
41511585 | 781 | edge e = PHI_ARG_EDGE (phi, i); |
4ee9c684 | 782 | |
41511585 | 783 | if (dump_file && (dump_flags & TDF_DETAILS)) |
784 | { | |
785 | fprintf (dump_file, | |
786 | "\n Argument #%d (%d -> %d %sexecutable)\n", | |
787 | i, e->src->index, e->dest->index, | |
788 | (e->flags & EDGE_EXECUTABLE) ? "" : "not "); | |
789 | } | |
790 | ||
791 | /* If the incoming edge is executable, compute the meet operator for | |
792 | the existing value of the PHI node and the current PHI argument. */ | |
793 | if (e->flags & EDGE_EXECUTABLE) | |
794 | { | |
88dbf20f | 795 | tree arg = PHI_ARG_DEF (phi, i); |
796 | prop_value_t arg_val; | |
4ee9c684 | 797 | |
88dbf20f | 798 | if (is_gimple_min_invariant (arg)) |
41511585 | 799 | { |
88dbf20f | 800 | arg_val.lattice_val = CONSTANT; |
801 | arg_val.value = arg; | |
802 | arg_val.mem_ref = NULL_TREE; | |
41511585 | 803 | } |
804 | else | |
bfa30570 | 805 | arg_val = *(get_value (arg)); |
4ee9c684 | 806 | |
88dbf20f | 807 | ccp_lattice_meet (&new_val, &arg_val); |
4ee9c684 | 808 | |
41511585 | 809 | if (dump_file && (dump_flags & TDF_DETAILS)) |
810 | { | |
811 | fprintf (dump_file, "\t"); | |
88dbf20f | 812 | print_generic_expr (dump_file, arg, dump_flags); |
813 | dump_lattice_value (dump_file, "\tValue: ", arg_val); | |
41511585 | 814 | fprintf (dump_file, "\n"); |
815 | } | |
4ee9c684 | 816 | |
41511585 | 817 | if (new_val.lattice_val == VARYING) |
818 | break; | |
819 | } | |
820 | } | |
4ee9c684 | 821 | |
822 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
41511585 | 823 | { |
824 | dump_lattice_value (dump_file, "\n PHI node value: ", new_val); | |
825 | fprintf (dump_file, "\n\n"); | |
826 | } | |
827 | ||
bfa30570 | 828 | /* Make the transition to the new value. */ |
41511585 | 829 | if (set_lattice_value (PHI_RESULT (phi), new_val)) |
830 | { | |
831 | if (new_val.lattice_val == VARYING) | |
832 | return SSA_PROP_VARYING; | |
833 | else | |
834 | return SSA_PROP_INTERESTING; | |
835 | } | |
836 | else | |
837 | return SSA_PROP_NOT_INTERESTING; | |
4ee9c684 | 838 | } |
839 | ||
840 | ||
41511585 | 841 | /* CCP specific front-end to the non-destructive constant folding |
842 | routines. | |
4ee9c684 | 843 | |
844 | Attempt to simplify the RHS of STMT knowing that one or more | |
845 | operands are constants. | |
846 | ||
847 | If simplification is possible, return the simplified RHS, | |
848 | otherwise return the original RHS. */ | |
849 | ||
850 | static tree | |
851 | ccp_fold (tree stmt) | |
852 | { | |
853 | tree rhs = get_rhs (stmt); | |
854 | enum tree_code code = TREE_CODE (rhs); | |
ce45a448 | 855 | enum tree_code_class kind = TREE_CODE_CLASS (code); |
4ee9c684 | 856 | tree retval = NULL_TREE; |
857 | ||
4ee9c684 | 858 | if (TREE_CODE (rhs) == SSA_NAME) |
88dbf20f | 859 | { |
860 | /* If the RHS is an SSA_NAME, return its known constant value, | |
861 | if any. */ | |
bfa30570 | 862 | return get_value (rhs)->value; |
88dbf20f | 863 | } |
864 | else if (do_store_ccp && stmt_makes_single_load (stmt)) | |
865 | { | |
866 | /* If the RHS is a memory load, see if the VUSEs associated with | |
867 | it are a valid constant for that memory load. */ | |
868 | prop_value_t *val = get_value_loaded_by (stmt, const_val); | |
5d66655a | 869 | if (val && val->mem_ref) |
870 | { | |
871 | if (operand_equal_p (val->mem_ref, rhs, 0)) | |
872 | return val->value; | |
873 | ||
874 | /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a | |
875 | complex type with a known constant value, return it. */ | |
876 | if ((TREE_CODE (rhs) == REALPART_EXPR | |
877 | || TREE_CODE (rhs) == IMAGPART_EXPR) | |
878 | && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0)) | |
879 | return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value); | |
880 | } | |
881 | return NULL_TREE; | |
88dbf20f | 882 | } |
4ee9c684 | 883 | |
884 | /* Unary operators. Note that we know the single operand must | |
885 | be a constant. So this should almost always return a | |
886 | simplified RHS. */ | |
ce45a448 | 887 | if (kind == tcc_unary) |
4ee9c684 | 888 | { |
889 | /* Handle unary operators which can appear in GIMPLE form. */ | |
890 | tree op0 = TREE_OPERAND (rhs, 0); | |
891 | ||
892 | /* Simplify the operand down to a constant. */ | |
893 | if (TREE_CODE (op0) == SSA_NAME) | |
894 | { | |
bfa30570 | 895 | prop_value_t *val = get_value (op0); |
4ee9c684 | 896 | if (val->lattice_val == CONSTANT) |
bfa30570 | 897 | op0 = get_value (op0)->value; |
4ee9c684 | 898 | } |
899 | ||
ad78532f | 900 | if ((code == NOP_EXPR || code == CONVERT_EXPR) |
548044d8 | 901 | && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (op0))) |
ad78532f | 902 | return op0; |
1675c594 | 903 | return fold_unary (code, TREE_TYPE (rhs), op0); |
4ee9c684 | 904 | } |
905 | ||
906 | /* Binary and comparison operators. We know one or both of the | |
907 | operands are constants. */ | |
ce45a448 | 908 | else if (kind == tcc_binary |
909 | || kind == tcc_comparison | |
4ee9c684 | 910 | || code == TRUTH_AND_EXPR |
911 | || code == TRUTH_OR_EXPR | |
912 | || code == TRUTH_XOR_EXPR) | |
913 | { | |
914 | /* Handle binary and comparison operators that can appear in | |
915 | GIMPLE form. */ | |
916 | tree op0 = TREE_OPERAND (rhs, 0); | |
917 | tree op1 = TREE_OPERAND (rhs, 1); | |
918 | ||
919 | /* Simplify the operands down to constants when appropriate. */ | |
920 | if (TREE_CODE (op0) == SSA_NAME) | |
921 | { | |
bfa30570 | 922 | prop_value_t *val = get_value (op0); |
4ee9c684 | 923 | if (val->lattice_val == CONSTANT) |
88dbf20f | 924 | op0 = val->value; |
4ee9c684 | 925 | } |
926 | ||
927 | if (TREE_CODE (op1) == SSA_NAME) | |
928 | { | |
bfa30570 | 929 | prop_value_t *val = get_value (op1); |
4ee9c684 | 930 | if (val->lattice_val == CONSTANT) |
88dbf20f | 931 | op1 = val->value; |
4ee9c684 | 932 | } |
933 | ||
1675c594 | 934 | return fold_binary (code, TREE_TYPE (rhs), op0, op1); |
4ee9c684 | 935 | } |
936 | ||
937 | /* We may be able to fold away calls to builtin functions if their | |
0bed3869 | 938 | arguments are constants. */ |
4ee9c684 | 939 | else if (code == CALL_EXPR |
c2f47e15 | 940 | && TREE_CODE (CALL_EXPR_FN (rhs)) == ADDR_EXPR |
941 | && TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)) == FUNCTION_DECL | |
942 | && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (rhs), 0))) | |
4ee9c684 | 943 | { |
b66731e8 | 944 | if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_USE)) |
4ee9c684 | 945 | { |
b66731e8 | 946 | tree *orig, var; |
b66731e8 | 947 | size_t i = 0; |
948 | ssa_op_iter iter; | |
949 | use_operand_p var_p; | |
4ee9c684 | 950 | |
951 | /* Preserve the original values of every operand. */ | |
680a19b9 | 952 | orig = XNEWVEC (tree, NUM_SSA_OPERANDS (stmt, SSA_OP_USE)); |
b66731e8 | 953 | FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE) |
954 | orig[i++] = var; | |
4ee9c684 | 955 | |
956 | /* Substitute operands with their values and try to fold. */ | |
88dbf20f | 957 | replace_uses_in (stmt, NULL, const_val); |
c2f47e15 | 958 | retval = fold_call_expr (rhs, false); |
4ee9c684 | 959 | |
960 | /* Restore operands to their original form. */ | |
b66731e8 | 961 | i = 0; |
962 | FOR_EACH_SSA_USE_OPERAND (var_p, stmt, iter, SSA_OP_USE) | |
963 | SET_USE (var_p, orig[i++]); | |
4ee9c684 | 964 | free (orig); |
965 | } | |
966 | } | |
967 | else | |
968 | return rhs; | |
969 | ||
970 | /* If we got a simplified form, see if we need to convert its type. */ | |
971 | if (retval) | |
f0613857 | 972 | return fold_convert (TREE_TYPE (rhs), retval); |
4ee9c684 | 973 | |
974 | /* No simplification was possible. */ | |
975 | return rhs; | |
976 | } | |
977 | ||
978 | ||
8782adcf | 979 | /* Return the tree representing the element referenced by T if T is an |
980 | ARRAY_REF or COMPONENT_REF into constant aggregates. Return | |
981 | NULL_TREE otherwise. */ | |
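   /* For instance (identifiers purely illustrative), given

        static const int tbl[3] = { 10, 20, 30 };

      a reference tbl[i_1] where i_1 has lattice value CONSTANT 1 is
      folded to the INTEGER_CST 20.  */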
982 | ||
983 | static tree | |
984 | fold_const_aggregate_ref (tree t) | |
985 | { | |
986 | prop_value_t *value; | |
c75b4594 | 987 | tree base, ctor, idx, field; |
988 | unsigned HOST_WIDE_INT cnt; | |
989 | tree cfield, cval; | |
8782adcf | 990 | |
991 | switch (TREE_CODE (t)) | |
992 | { | |
993 | case ARRAY_REF: | |
994 | /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its | |
995 | DECL_INITIAL. If BASE is a nested reference into another | |
996 | ARRAY_REF or COMPONENT_REF, make a recursive call to resolve | |
997 | the inner reference. */ | |
998 | base = TREE_OPERAND (t, 0); | |
999 | switch (TREE_CODE (base)) | |
1000 | { | |
1001 | case VAR_DECL: | |
1002 | if (!TREE_READONLY (base) | |
1003 | || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE | |
1004 | || !targetm.binds_local_p (base)) | |
1005 | return NULL_TREE; | |
1006 | ||
1007 | ctor = DECL_INITIAL (base); | |
1008 | break; | |
1009 | ||
1010 | case ARRAY_REF: | |
1011 | case COMPONENT_REF: | |
1012 | ctor = fold_const_aggregate_ref (base); | |
1013 | break; | |
1014 | ||
1015 | default: | |
1016 | return NULL_TREE; | |
1017 | } | |
1018 | ||
1019 | if (ctor == NULL_TREE | |
4f61cce6 | 1020 | || (TREE_CODE (ctor) != CONSTRUCTOR |
1021 | && TREE_CODE (ctor) != STRING_CST) | |
8782adcf | 1022 | || !TREE_STATIC (ctor)) |
1023 | return NULL_TREE; | |
1024 | ||
1025 | /* Get the index. If we have an SSA_NAME, try to resolve it | |
1026 | with the current lattice value for the SSA_NAME. */ | |
1027 | idx = TREE_OPERAND (t, 1); | |
1028 | switch (TREE_CODE (idx)) | |
1029 | { | |
1030 | case SSA_NAME: | |
bfa30570 | 1031 | if ((value = get_value (idx)) |
8782adcf | 1032 | && value->lattice_val == CONSTANT |
1033 | && TREE_CODE (value->value) == INTEGER_CST) | |
1034 | idx = value->value; | |
1035 | else | |
1036 | return NULL_TREE; | |
1037 | break; | |
1038 | ||
1039 | case INTEGER_CST: | |
1040 | break; | |
1041 | ||
1042 | default: | |
1043 | return NULL_TREE; | |
1044 | } | |
1045 | ||
4f61cce6 | 1046 | /* Fold read from constant string. */ |
1047 | if (TREE_CODE (ctor) == STRING_CST) | |
1048 | { | |
1049 | if ((TYPE_MODE (TREE_TYPE (t)) | |
1050 | == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) | |
1051 | && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) | |
1052 | == MODE_INT) | |
1053 | && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1 | |
1054 | && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0) | |
7b050b7b | 1055 | return build_int_cst_type (TREE_TYPE (t), |
1056 | (TREE_STRING_POINTER (ctor) | |
1057 | [TREE_INT_CST_LOW (idx)])); | |
4f61cce6 | 1058 | return NULL_TREE; |
1059 | } | |
1060 | ||
8782adcf | 1061 | /* Whoo-hoo! I'll fold ya baby. Yeah! */ |
c75b4594 | 1062 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval) |
1063 | if (tree_int_cst_equal (cfield, idx)) | |
1064 | return cval; | |
8782adcf | 1065 | break; |
1066 | ||
1067 | case COMPONENT_REF: | |
1068 | /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its | |
1069 | DECL_INITIAL. If BASE is a nested reference into another | |
1070 | ARRAY_REF or COMPONENT_REF, make a recursive call to resolve | |
1071 | the inner reference. */ | |
1072 | base = TREE_OPERAND (t, 0); | |
1073 | switch (TREE_CODE (base)) | |
1074 | { | |
1075 | case VAR_DECL: | |
1076 | if (!TREE_READONLY (base) | |
1077 | || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE | |
1078 | || !targetm.binds_local_p (base)) | |
1079 | return NULL_TREE; | |
1080 | ||
1081 | ctor = DECL_INITIAL (base); | |
1082 | break; | |
1083 | ||
1084 | case ARRAY_REF: | |
1085 | case COMPONENT_REF: | |
1086 | ctor = fold_const_aggregate_ref (base); | |
1087 | break; | |
1088 | ||
1089 | default: | |
1090 | return NULL_TREE; | |
1091 | } | |
1092 | ||
1093 | if (ctor == NULL_TREE | |
1094 | || TREE_CODE (ctor) != CONSTRUCTOR | |
1095 | || !TREE_STATIC (ctor)) | |
1096 | return NULL_TREE; | |
1097 | ||
1098 | field = TREE_OPERAND (t, 1); | |
1099 | ||
c75b4594 | 1100 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval) |
1101 | if (cfield == field | |
8782adcf | 1102 | /* FIXME: Handle bit-fields. */ |
c75b4594 | 1103 | && ! DECL_BIT_FIELD (cfield)) |
1104 | return cval; | |
8782adcf | 1105 | break; |
1106 | ||
908cb59d | 1107 | case REALPART_EXPR: |
1108 | case IMAGPART_EXPR: | |
1109 | { | |
1110 | tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0)); | |
1111 | if (c && TREE_CODE (c) == COMPLEX_CST) | |
1112 | return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c); | |
1113 | break; | |
1114 | } | |
1115 | ||
8782adcf | 1116 | default: |
1117 | break; | |
1118 | } | |
1119 | ||
1120 | return NULL_TREE; | |
1121 | } | |
1122 | ||
4ee9c684 | 1123 | /* Evaluate statement STMT. */ |
1124 | ||
88dbf20f | 1125 | static prop_value_t |
4ee9c684 | 1126 | evaluate_stmt (tree stmt) |
1127 | { | |
88dbf20f | 1128 | prop_value_t val; |
4f61cce6 | 1129 | tree simplified = NULL_TREE; |
88dbf20f | 1130 | ccp_lattice_t likelyvalue = likely_value (stmt); |
add6ee5e | 1131 | bool is_constant; |
88dbf20f | 1132 | |
1133 | val.mem_ref = NULL_TREE; | |
4ee9c684 | 1134 | |
add6ee5e | 1135 | fold_defer_overflow_warnings (); |
1136 | ||
4ee9c684 | 1137 | /* If the statement is likely to have a CONSTANT result, then try |
1138 | to fold the statement to determine the constant value. */ | |
1139 | if (likelyvalue == CONSTANT) | |
1140 | simplified = ccp_fold (stmt); | |
1141 | /* If the statement is likely to have a VARYING result, then do not | |
1142 | bother folding the statement. */ | |
4f61cce6 | 1143 | if (likelyvalue == VARYING) |
4ee9c684 | 1144 | simplified = get_rhs (stmt); |
8782adcf | 1145 | /* If the statement is an ARRAY_REF or COMPONENT_REF into constant |
1146 | aggregates, extract the referenced constant. Otherwise the | |
1147 | statement is likely to have an UNDEFINED value, and there will be | |
1148 | nothing to do. Note that fold_const_aggregate_ref returns | |
1149 | NULL_TREE if the first case does not match. */ | |
4f61cce6 | 1150 | else if (!simplified) |
8782adcf | 1151 | simplified = fold_const_aggregate_ref (get_rhs (stmt)); |
4ee9c684 | 1152 | |
add6ee5e | 1153 | is_constant = simplified && is_gimple_min_invariant (simplified); |
1154 | ||
1155 | fold_undefer_overflow_warnings (is_constant, stmt, 0); | |
1156 | ||
1157 | if (is_constant) | |
4ee9c684 | 1158 | { |
1159 | /* The statement produced a constant value. */ | |
1160 | val.lattice_val = CONSTANT; | |
88dbf20f | 1161 | val.value = simplified; |
4ee9c684 | 1162 | } |
1163 | else | |
1164 | { | |
1165 | /* The statement produced a nonconstant value. If the statement | |
88dbf20f | 1166 | had UNDEFINED operands, then the result of the statement |
1167 | should be UNDEFINED. Otherwise, the statement is VARYING. */ | |
bfa30570 | 1168 | if (likelyvalue == UNDEFINED) |
b765fa12 | 1169 | val.lattice_val = likelyvalue; |
1170 | else | |
1171 | val.lattice_val = VARYING; | |
1172 | ||
88dbf20f | 1173 | val.value = NULL_TREE; |
4ee9c684 | 1174 | } |
41511585 | 1175 | |
1176 | return val; | |
4ee9c684 | 1177 | } |
1178 | ||
1179 | ||
41511585 | 1180 | /* Visit the assignment statement STMT. Set the value of its LHS to the |
88dbf20f | 1181 | value computed by the RHS and store LHS in *OUTPUT_P. If STMT |
1182 | creates virtual definitions, set the value of each new name to that | |
1183 | of the RHS (if we can derive a constant out of the RHS). */ | |
4ee9c684 | 1184 | |
41511585 | 1185 | static enum ssa_prop_result |
1186 | visit_assignment (tree stmt, tree *output_p) | |
4ee9c684 | 1187 | { |
88dbf20f | 1188 | prop_value_t val; |
41511585 | 1189 | tree lhs, rhs; |
88dbf20f | 1190 | enum ssa_prop_result retval; |
4ee9c684 | 1191 | |
35cc02b5 | 1192 | lhs = GIMPLE_STMT_OPERAND (stmt, 0); |
1193 | rhs = GIMPLE_STMT_OPERAND (stmt, 1); | |
4ee9c684 | 1194 | |
41511585 | 1195 | if (TREE_CODE (rhs) == SSA_NAME) |
1196 | { | |
1197 | /* For a simple copy operation, we copy the lattice values. */ | |
bfa30570 | 1198 | prop_value_t *nval = get_value (rhs); |
41511585 | 1199 | val = *nval; |
1200 | } | |
88dbf20f | 1201 | else if (do_store_ccp && stmt_makes_single_load (stmt)) |
41511585 | 1202 | { |
88dbf20f | 1203 | /* Same as above, but the RHS is not a gimple register and yet |
bfa30570 | 1204 | has a known VUSE. If STMT is loading from the same memory |
88dbf20f | 1205 | location that created the SSA_NAMEs for the virtual operands, |
1206 | we can propagate the value on the RHS. */ | |
1207 | prop_value_t *nval = get_value_loaded_by (stmt, const_val); | |
1208 | ||
bfa30570 | 1209 | if (nval |
1210 | && nval->mem_ref | |
b765fa12 | 1211 | && operand_equal_p (nval->mem_ref, rhs, 0)) |
88dbf20f | 1212 | val = *nval; |
1213 | else | |
1214 | val = evaluate_stmt (stmt); | |
41511585 | 1215 | } |
1216 | else | |
a065a588 | 1217 | /* Evaluate the statement. */ |
41511585 | 1218 | val = evaluate_stmt (stmt); |
4ee9c684 | 1219 | |
a065a588 | 1220 | /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant |
eb716043 | 1221 | value to be a VIEW_CONVERT_EXPR of the old constant value. |
a065a588 | 1222 | |
1223 | ??? Also, if this was a definition of a bitfield, we need to widen | |
41511585 | 1224 | the constant value into the type of the destination variable. This |
1225 | should not be necessary if GCC represented bitfields properly. */ | |
1226 | { | |
35cc02b5 | 1227 | tree orig_lhs = GIMPLE_STMT_OPERAND (stmt, 0); |
a065a588 | 1228 | |
1229 | if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR | |
1230 | && val.lattice_val == CONSTANT) | |
1231 | { | |
5f9acd88 | 1232 | tree w = fold_unary (VIEW_CONVERT_EXPR, |
1233 | TREE_TYPE (TREE_OPERAND (orig_lhs, 0)), | |
1234 | val.value); | |
eb716043 | 1235 | |
662f5fa5 | 1236 | orig_lhs = TREE_OPERAND (orig_lhs, 0); |
eb716043 | 1237 | if (w && is_gimple_min_invariant (w)) |
88dbf20f | 1238 | val.value = w; |
eb716043 | 1239 | else |
1240 | { | |
1241 | val.lattice_val = VARYING; | |
88dbf20f | 1242 | val.value = NULL; |
eb716043 | 1243 | } |
a065a588 | 1244 | } |
1245 | ||
41511585 | 1246 | if (val.lattice_val == CONSTANT |
a065a588 | 1247 | && TREE_CODE (orig_lhs) == COMPONENT_REF |
1248 | && DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1))) | |
4ee9c684 | 1249 | { |
88dbf20f | 1250 | tree w = widen_bitfield (val.value, TREE_OPERAND (orig_lhs, 1), |
a065a588 | 1251 | orig_lhs); |
41511585 | 1252 | |
1253 | if (w && is_gimple_min_invariant (w)) | |
88dbf20f | 1254 | val.value = w; |
41511585 | 1255 | else |
4ee9c684 | 1256 | { |
41511585 | 1257 | val.lattice_val = VARYING; |
88dbf20f | 1258 | val.value = NULL_TREE; |
1259 | val.mem_ref = NULL_TREE; | |
4ee9c684 | 1260 | } |
4ee9c684 | 1261 | } |
41511585 | 1262 | } |
4ee9c684 | 1263 | |
88dbf20f | 1264 | retval = SSA_PROP_NOT_INTERESTING; |
4ee9c684 | 1265 | |
41511585 | 1266 | /* Set the lattice value of the statement's output. */ |
88dbf20f | 1267 | if (TREE_CODE (lhs) == SSA_NAME) |
4ee9c684 | 1268 | { |
88dbf20f | 1269 | /* If STMT is an assignment to an SSA_NAME, we only have one |
1270 | value to set. */ | |
1271 | if (set_lattice_value (lhs, val)) | |
1272 | { | |
1273 | *output_p = lhs; | |
1274 | if (val.lattice_val == VARYING) | |
1275 | retval = SSA_PROP_VARYING; | |
1276 | else | |
1277 | retval = SSA_PROP_INTERESTING; | |
1278 | } | |
4ee9c684 | 1279 | } |
88dbf20f | 1280 | else if (do_store_ccp && stmt_makes_single_store (stmt)) |
1281 | { | |
4fb5e5ca | 1282 | /* Otherwise, set the names in VDEF operands to the new |
1283 | constant value and mark the LHS as the memory reference | |
1284 | associated with VAL. */ | |
88dbf20f | 1285 | ssa_op_iter i; |
1286 | tree vdef; | |
1287 | bool changed; | |
1288 | ||
88dbf20f | 1289 | /* Mark VAL as stored in the LHS of this assignment. */ |
bfa30570 | 1290 | if (val.lattice_val == CONSTANT) |
1291 | val.mem_ref = lhs; | |
88dbf20f | 1292 | |
1293 | /* Set the value of every VDEF to VAL. */ | |
1294 | changed = false; | |
1295 | FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS) | |
aecfc21d | 1296 | { |
1297 | /* See PR 29801. We may have VDEFs for read-only variables | |
1298 | (see the handling of unmodifiable variables in | |
1299 | add_virtual_operand); do not attempt to change their value. */ | |
1300 | if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE) | |
1301 | continue; | |
1302 | ||
1303 | changed |= set_lattice_value (vdef, val); | |
1304 | } | |
88dbf20f | 1305 | |
1306 | /* Note that for propagation purposes, we are only interested in | |
1307 | visiting statements that load the exact same memory reference | |
1308 | stored here. Those statements will have the exact same list | |
1309 | of virtual uses, so it is enough to set the output of this | |
1310 | statement to be its first virtual definition. */ | |
1311 | *output_p = first_vdef (stmt); | |
1312 | if (changed) | |
1313 | { | |
1314 | if (val.lattice_val == VARYING) | |
1315 | retval = SSA_PROP_VARYING; | |
1316 | else | |
1317 | retval = SSA_PROP_INTERESTING; | |
1318 | } | |
1319 | } | |
1320 | ||
1321 | return retval; | |
4ee9c684 | 1322 | } |
1323 | ||
4ee9c684 | 1324 | |
41511585 | 1325 | /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING |
1326 | if it can determine which edge will be taken. Otherwise, return | |
1327 | SSA_PROP_VARYING. */ | |
1328 | ||
1329 | static enum ssa_prop_result | |
1330 | visit_cond_stmt (tree stmt, edge *taken_edge_p) | |
4ee9c684 | 1331 | { |
88dbf20f | 1332 | prop_value_t val; |
41511585 | 1333 | basic_block block; |
1334 | ||
1335 | block = bb_for_stmt (stmt); | |
1336 | val = evaluate_stmt (stmt); | |
1337 | ||
1338 | /* Find which edge out of the conditional block will be taken and add it | |
1339 | to the worklist. If no single edge can be determined statically, | |
1340 | return SSA_PROP_VARYING to feed all the outgoing edges to the | |
1341 | propagation engine. */ | |
88dbf20f | 1342 | *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0; |
41511585 | 1343 | if (*taken_edge_p) |
1344 | return SSA_PROP_INTERESTING; | |
1345 | else | |
1346 | return SSA_PROP_VARYING; | |
4ee9c684 | 1347 | } |
1348 | ||
4ee9c684 | 1349 | |
41511585 | 1350 | /* Evaluate statement STMT. If the statement produces an output value and |
1351 | its evaluation changes the lattice value of its output, return | |
1352 | SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the | |
1353 | output value. | |
1354 | ||
1355 | If STMT is a conditional branch and we can determine its truth | |
1356 | value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying | |
1357 | value, return SSA_PROP_VARYING. */ | |
4ee9c684 | 1358 | |
41511585 | 1359 | static enum ssa_prop_result |
1360 | ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p) | |
1361 | { | |
41511585 | 1362 | tree def; |
1363 | ssa_op_iter iter; | |
4ee9c684 | 1364 | |
41511585 | 1365 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4ee9c684 | 1366 | { |
88dbf20f | 1367 | fprintf (dump_file, "\nVisiting statement:\n"); |
1368 | print_generic_stmt (dump_file, stmt, dump_flags); | |
41511585 | 1369 | fprintf (dump_file, "\n"); |
4ee9c684 | 1370 | } |
4ee9c684 | 1371 | |
35cc02b5 | 1372 | if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT) |
4ee9c684 | 1373 | { |
41511585 | 1374 | /* If the statement is an assignment that produces a single |
1375 | output value, evaluate its RHS to see if the lattice value of | |
1376 | its output has changed. */ | |
1377 | return visit_assignment (stmt, output_p); | |
4ee9c684 | 1378 | } |
41511585 | 1379 | else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR) |
4ee9c684 | 1380 | { |
41511585 | 1381 | /* If STMT is a conditional branch, see if we can determine |
1382 | which branch will be taken. */ | |
1383 | return visit_cond_stmt (stmt, taken_edge_p); | |
4ee9c684 | 1384 | } |
4ee9c684 | 1385 | |
41511585 | 1386 | /* Any other kind of statement is not interesting for constant |
1387 | propagation and, therefore, not worth simulating. */ | |
41511585 | 1388 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1389 | fprintf (dump_file, "No interesting values produced. Marked VARYING.\n"); | |
4ee9c684 | 1390 | |
41511585 | 1391 | /* Definitions made by statements other than assignments to |
1392 | SSA_NAMEs represent unknown modifications to their outputs. | |
1393 | Mark them VARYING. */ | |
88dbf20f | 1394 | FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS) |
1395 | { | |
1396 | prop_value_t v = { VARYING, NULL_TREE, NULL_TREE }; | |
1397 | set_lattice_value (def, v); | |
1398 | } | |
4ee9c684 | 1399 | |
41511585 | 1400 | return SSA_PROP_VARYING; |
1401 | } | |
4ee9c684 | 1402 | |
4ee9c684 | 1403 | |
88dbf20f | 1404 | /* Main entry point for SSA Conditional Constant Propagation. */ |
41511585 | 1405 | |
33a34f1e | 1406 | static unsigned int |
88dbf20f | 1407 | execute_ssa_ccp (bool store_ccp) |
41511585 | 1408 | { |
88dbf20f | 1409 | do_store_ccp = store_ccp; |
41511585 | 1410 | ccp_initialize (); |
1411 | ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node); | |
33a34f1e | 1412 | if (ccp_finalize ()) |
eb9161e7 | 1413 | return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals); |
33a34f1e | 1414 | else |
1415 | return 0; | |
4ee9c684 | 1416 | } |
1417 | ||
5664499b | 1418 | |
2a1990e9 | 1419 | static unsigned int |
88dbf20f | 1420 | do_ssa_ccp (void) |
1421 | { | |
33a34f1e | 1422 | return execute_ssa_ccp (false); |
88dbf20f | 1423 | } |
1424 | ||
1425 | ||
5664499b | 1426 | static bool |
41511585 | 1427 | gate_ccp (void) |
5664499b | 1428 | { |
41511585 | 1429 | return flag_tree_ccp != 0; |
5664499b | 1430 | } |
1431 | ||
4ee9c684 | 1432 | |
41511585 | 1433 | struct tree_opt_pass pass_ccp = |
1434 | { | |
1435 | "ccp", /* name */ | |
1436 | gate_ccp, /* gate */ | |
88dbf20f | 1437 | do_ssa_ccp, /* execute */ |
41511585 | 1438 | NULL, /* sub */ |
1439 | NULL, /* next */ | |
1440 | 0, /* static_pass_number */ | |
1441 | TV_TREE_CCP, /* tv_id */ | |
49290934 | 1442 | PROP_cfg | PROP_ssa, /* properties_required */ |
41511585 | 1443 | 0, /* properties_provided */ |
b6246c40 | 1444 | 0, /* properties_destroyed */ |
41511585 | 1445 | 0, /* todo_flags_start */ |
33a34f1e | 1446 | TODO_dump_func | TODO_verify_ssa |
1447 | | TODO_verify_stmts | TODO_ggc_collect,/* todo_flags_finish */ | |
0f9005dd | 1448 | 0 /* letter */ |
41511585 | 1449 | }; |
4ee9c684 | 1450 | |
4ee9c684 | 1451 | |
2a1990e9 | 1452 | static unsigned int |
88dbf20f | 1453 | do_ssa_store_ccp (void) |
1454 | { | |
1455 | /* If STORE-CCP is not enabled, we just run regular CCP. */ | |
33a34f1e | 1456 | return execute_ssa_ccp (flag_tree_store_ccp != 0); |
88dbf20f | 1457 | } |
1458 | ||
1459 | static bool | |
1460 | gate_store_ccp (void) | |
1461 | { | |
1462 | /* STORE-CCP is enabled only with -ftree-store-ccp, but when | |
1463 | -fno-tree-store-ccp is specified, we should run regular CCP. | |
1464 | That's why the pass is enabled with either flag. */ | |
1465 | return flag_tree_store_ccp != 0 || flag_tree_ccp != 0; | |
1466 | } | |
1467 | ||
1468 | ||
1469 | struct tree_opt_pass pass_store_ccp = | |
1470 | { | |
1471 | "store_ccp", /* name */ | |
1472 | gate_store_ccp, /* gate */ | |
1473 | do_ssa_store_ccp, /* execute */ | |
1474 | NULL, /* sub */ | |
1475 | NULL, /* next */ | |
1476 | 0, /* static_pass_number */ | |
1477 | TV_TREE_STORE_CCP, /* tv_id */ | |
1478 | PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */ | |
1479 | 0, /* properties_provided */ | |
b6246c40 | 1480 | 0, /* properties_destroyed */ |
88dbf20f | 1481 | 0, /* todo_flags_start */ |
33a34f1e | 1482 | TODO_dump_func | TODO_verify_ssa |
1483 | | TODO_verify_stmts | TODO_ggc_collect,/* todo_flags_finish */ | |
88dbf20f | 1484 | 0 /* letter */ |
1485 | }; | |
1486 | ||
41511585 | 1487 | /* Given a constant value VAL for bitfield FIELD, and a destination |
1488 | variable VAR, return VAL appropriately widened to fit into VAR. If | |
1489 | FIELD is wider than HOST_WIDE_INT, NULL is returned. */ | |
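/* Illustrative sketch (not part of the original source): assuming a 32-bit
   destination VAR and the 3-bit signed bitfield

     struct s { int f : 3; };

   a VAL whose low three bits are 0b101 (the in-field representation of -3)
   has its sign bit set, so the sign-extension branch below ORs in the mask
   0xfffffff8 and produces -3 in VAR.  For an unsigned field the
   zero-extension branch instead ANDs VAL with the mask 0x7.  */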
4ee9c684 | 1490 | |
41511585 | 1491 | tree |
1492 | widen_bitfield (tree val, tree field, tree var) | |
4ee9c684 | 1493 | { |
41511585 | 1494 | unsigned HOST_WIDE_INT var_size, field_size; |
1495 | tree wide_val; | |
1496 | unsigned HOST_WIDE_INT mask; | |
1497 | unsigned int i; | |
4ee9c684 | 1498 | |
41511585 | 1499 | /* We can only do this if the sizes of the type and the field, and VAL |
1500 | itself, are all constants representable in a HOST_WIDE_INT. */ |
1501 | if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1) | |
1502 | || !host_integerp (DECL_SIZE (field), 1) | |
1503 | || !host_integerp (val, 0)) | |
1504 | return NULL_TREE; | |
4ee9c684 | 1505 | |
41511585 | 1506 | var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1); |
1507 | field_size = tree_low_cst (DECL_SIZE (field), 1); | |
4ee9c684 | 1508 | |
41511585 | 1509 | /* Give up if either the bitfield or the variable is too wide. */ |
1510 | if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT) | |
1511 | return NULL_TREE; | |
4ee9c684 | 1512 | |
8c0963c4 | 1513 | gcc_assert (var_size >= field_size); |
4ee9c684 | 1514 | |
41511585 | 1515 | /* If the sign bit of the value is not set or the field's type is unsigned, |
1516 | just mask off the high order bits of the value. */ | |
1517 | if (DECL_UNSIGNED (field) | |
1518 | || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT)1) << (field_size - 1)))) | |
1519 | { | |
1520 | /* Zero extension. Build a mask with the lower 'field_size' bits | |
1521 | set and a BIT_AND_EXPR node to clear the high order bits of | |
1522 | the value. */ | |
1523 | for (i = 0, mask = 0; i < field_size; i++) | |
1524 | mask |= ((HOST_WIDE_INT) 1) << i; | |
4ee9c684 | 1525 | |
e7be49a3 | 1526 | wide_val = fold_build2 (BIT_AND_EXPR, TREE_TYPE (var), val, |
1527 | build_int_cst (TREE_TYPE (var), mask)); | |
4ee9c684 | 1528 | } |
41511585 | 1529 | else |
5664499b | 1530 | { |
41511585 | 1531 | /* Sign extension. Create a mask with the upper 'field_size' |
1532 | bits set and a BIT_IOR_EXPR to set the high order bits of the | |
1533 | value. */ | |
1534 | for (i = 0, mask = 0; i < (var_size - field_size); i++) | |
1535 | mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1); | |
1536 | ||
e7be49a3 | 1537 | wide_val = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (var), val, |
1538 | build_int_cst (TREE_TYPE (var), mask)); | |
5664499b | 1539 | } |
4ee9c684 | 1540 | |
e7be49a3 | 1541 | return wide_val; |
4ee9c684 | 1542 | } |
1543 | ||
41511585 | 1544 | |
4ee9c684 | 1545 | /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X]. |
1546 | BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE | |
0bed3869 | 1547 | is the desired result type. */ |
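/* Illustrative sketch (not in the original source): assuming 4-byte ints and

     int a[10];

   the reference *(&a[1] + 8), where 8 is a byte offset, matches the
   *(&A[N]+O) case below: the byte offset divides evenly by the element size
   (8 / 4 == 2), and adding the element offset 1 gives a[3].  */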
4ee9c684 | 1548 | |
1549 | static tree | |
1550 | maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type) | |
1551 | { | |
e71da05f | 1552 | tree min_idx, idx, idx_type, elt_offset = integer_zero_node; |
6374121b | 1553 | tree array_type, elt_type, elt_size; |
1554 | ||
1555 | /* If BASE is an ARRAY_REF, we can pick up another offset (this time | |
1556 | measured in units of the size of the element type) from that ARRAY_REF. |
1557 | We can't do anything if either is variable. | |
1558 | ||
1559 | The case we handle here is *(&A[N]+O). */ | |
1560 | if (TREE_CODE (base) == ARRAY_REF) | |
1561 | { | |
1562 | tree low_bound = array_ref_low_bound (base); | |
1563 | ||
1564 | elt_offset = TREE_OPERAND (base, 1); | |
1565 | if (TREE_CODE (low_bound) != INTEGER_CST | |
1566 | || TREE_CODE (elt_offset) != INTEGER_CST) | |
1567 | return NULL_TREE; | |
1568 | ||
1569 | elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0); | |
1570 | base = TREE_OPERAND (base, 0); | |
1571 | } | |
4ee9c684 | 1572 | |
1573 | /* Ignore stupid user tricks of indexing non-array variables. */ | |
1574 | array_type = TREE_TYPE (base); | |
1575 | if (TREE_CODE (array_type) != ARRAY_TYPE) | |
1576 | return NULL_TREE; | |
1577 | elt_type = TREE_TYPE (array_type); | |
c8ca3ee7 | 1578 | if (!useless_type_conversion_p (orig_type, elt_type)) |
4ee9c684 | 1579 | return NULL_TREE; |
e71da05f | 1580 | |
1581 | /* Use signed size type for intermediate computation on the index. */ | |
1582 | idx_type = signed_type_for (size_type_node); | |
1583 | ||
6374121b | 1584 | /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the |
1585 | element type (so we can use the alignment if it's not constant). | |
1586 | Otherwise, compute the offset as an index by using a division. If the | |
1587 | division isn't exact, then don't do anything. */ | |
4ee9c684 | 1588 | elt_size = TYPE_SIZE_UNIT (elt_type); |
3b45913d | 1589 | if (!elt_size) |
1590 | return NULL; | |
6374121b | 1591 | if (integer_zerop (offset)) |
1592 | { | |
1593 | if (TREE_CODE (elt_size) != INTEGER_CST) | |
1594 | elt_size = size_int (TYPE_ALIGN (elt_type)); | |
4ee9c684 | 1595 | |
e71da05f | 1596 | idx = build_int_cst (idx_type, 0); |
6374121b | 1597 | } |
1598 | else | |
1599 | { | |
1600 | unsigned HOST_WIDE_INT lquo, lrem; | |
1601 | HOST_WIDE_INT hquo, hrem; | |
e71da05f | 1602 | double_int soffset; |
6374121b | 1603 | |
e71da05f | 1604 | /* The final array offset should be signed, so we need |
1605 | to sign-extend the (possibly pointer) offset here | |
1606 | and use signed division. */ | |
1607 | soffset = double_int_sext (tree_to_double_int (offset), | |
1608 | TYPE_PRECISION (TREE_TYPE (offset))); | |
6374121b | 1609 | if (TREE_CODE (elt_size) != INTEGER_CST |
e71da05f | 1610 | || div_and_round_double (TRUNC_DIV_EXPR, 0, |
1611 | soffset.low, soffset.high, | |
6374121b | 1612 | TREE_INT_CST_LOW (elt_size), |
1613 | TREE_INT_CST_HIGH (elt_size), | |
1614 | &lquo, &hquo, &lrem, &hrem) | |
1615 | || lrem || hrem) | |
1616 | return NULL_TREE; | |
4ee9c684 | 1617 | |
e71da05f | 1618 | idx = build_int_cst_wide (idx_type, lquo, hquo); |
6374121b | 1619 | } |
1620 | ||
1621 | /* Assume the low bound is zero. If there is a domain type, get the | |
1622 | low bound, if any, convert the index into that type, and add the | |
1623 | low bound. */ | |
e71da05f | 1624 | min_idx = build_int_cst (idx_type, 0); |
6374121b | 1625 | if (TYPE_DOMAIN (array_type)) |
4ee9c684 | 1626 | { |
e71da05f | 1627 | idx_type = TYPE_DOMAIN (array_type); |
1628 | if (TYPE_MIN_VALUE (idx_type)) | |
1629 | min_idx = TYPE_MIN_VALUE (idx_type); | |
6374121b | 1630 | else |
e71da05f | 1631 | min_idx = fold_convert (idx_type, min_idx); |
6374121b | 1632 | |
1633 | if (TREE_CODE (min_idx) != INTEGER_CST) | |
1634 | return NULL_TREE; | |
1635 | ||
e71da05f | 1636 | elt_offset = fold_convert (idx_type, elt_offset); |
4ee9c684 | 1637 | } |
1638 | ||
6374121b | 1639 | if (!integer_zerop (min_idx)) |
1640 | idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0); | |
1641 | if (!integer_zerop (elt_offset)) | |
1642 | idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0); | |
1643 | ||
e71da05f | 1644 | /* Make sure any truncation to the index type happens late, only after offsetting. */ |
1645 | idx = fold_convert (idx_type, idx); | |
1646 | ||
d3828421 | 1647 | return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE); |
4ee9c684 | 1648 | } |
1649 | ||
41511585 | 1650 | |
3b45913d | 1651 | /* Attempt to fold *(S+O) to S.X. |
4ee9c684 | 1652 | BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE |
1653 | is the desired result type. */ | |
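/* Illustrative sketch (not in the original source): assuming 4-byte ints and
   no padding in

     struct s { int a; int b; };

   folding *(int *)((char *)&x + 4) for a variable x of type struct s matches
   field b at byte offset 4 exactly, so the walk below can return the
   COMPONENT_REF x.b.  */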
4ee9c684 | 1654 | |
3b45913d | 1655 | static tree |
4ee9c684 | 1656 | maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset, |
1657 | tree orig_type, bool base_is_ptr) | |
1658 | { | |
6d5d8428 | 1659 | tree f, t, field_type, tail_array_field, field_offset; |
d9745cea | 1660 | tree ret; |
1661 | tree new_base; | |
4ee9c684 | 1662 | |
1663 | if (TREE_CODE (record_type) != RECORD_TYPE | |
1664 | && TREE_CODE (record_type) != UNION_TYPE | |
1665 | && TREE_CODE (record_type) != QUAL_UNION_TYPE) | |
1666 | return NULL_TREE; | |
1667 | ||
1668 | /* Short-circuit silly cases. */ | |
c8ca3ee7 | 1669 | if (useless_type_conversion_p (record_type, orig_type)) |
4ee9c684 | 1670 | return NULL_TREE; |
1671 | ||
1672 | tail_array_field = NULL_TREE; | |
1673 | for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f)) | |
1674 | { | |
1675 | int cmp; | |
1676 | ||
1677 | if (TREE_CODE (f) != FIELD_DECL) | |
1678 | continue; | |
1679 | if (DECL_BIT_FIELD (f)) | |
1680 | continue; | |
6d5d8428 | 1681 | |
3b45913d | 1682 | if (!DECL_FIELD_OFFSET (f)) |
1683 | continue; | |
6d5d8428 | 1684 | field_offset = byte_position (f); |
1685 | if (TREE_CODE (field_offset) != INTEGER_CST) | |
4ee9c684 | 1686 | continue; |
1687 | ||
1688 | /* ??? Java creates "interesting" fields for representing base classes. | |
1689 | They have no name, and have no context. With no context, we get into | |
1690 | trouble with nonoverlapping_component_refs_p. Skip them. */ | |
1691 | if (!DECL_FIELD_CONTEXT (f)) | |
1692 | continue; | |
1693 | ||
1694 | /* The previous array field isn't at the end. */ | |
1695 | tail_array_field = NULL_TREE; | |
1696 | ||
1697 | /* Check to see if this offset overlaps with the field. */ | |
6d5d8428 | 1698 | cmp = tree_int_cst_compare (field_offset, offset); |
4ee9c684 | 1699 | if (cmp > 0) |
1700 | continue; | |
1701 | ||
1702 | field_type = TREE_TYPE (f); | |
4ee9c684 | 1703 | |
1704 | /* Here we exactly match the offset being checked. If the types match, | |
1705 | then we can return that field. */ | |
115073ff | 1706 | if (cmp == 0 |
c8ca3ee7 | 1707 | && useless_type_conversion_p (orig_type, field_type)) |
4ee9c684 | 1708 | { |
1709 | if (base_is_ptr) | |
1710 | base = build1 (INDIRECT_REF, record_type, base); | |
40b19772 | 1711 | t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); |
4ee9c684 | 1712 | return t; |
1713 | } | |
115073ff | 1714 | |
1715 | /* Don't care about offsets into the middle of scalars. */ | |
1716 | if (!AGGREGATE_TYPE_P (field_type)) | |
1717 | continue; | |
4ee9c684 | 1718 | |
115073ff | 1719 | /* Check for an array at the end of the struct. This is often |
1720 | used for flexible array members. We should be able to |
1721 | turn this into an array access anyway. */ | |
1722 | if (TREE_CODE (field_type) == ARRAY_TYPE) | |
1723 | tail_array_field = f; | |
1724 | ||
1725 | /* Check the end of the field against the offset. */ | |
1726 | if (!DECL_SIZE_UNIT (f) | |
1727 | || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST) | |
1728 | continue; | |
1729 | t = int_const_binop (MINUS_EXPR, offset, field_offset, 1); | |
1730 | if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f))) | |
1731 | continue; | |
4ee9c684 | 1732 | |
115073ff | 1733 | /* If we matched, then set offset to the displacement into |
1734 | this field. */ | |
d9745cea | 1735 | if (base_is_ptr) |
1736 | new_base = build1 (INDIRECT_REF, record_type, base); | |
1737 | else | |
1738 | new_base = base; | |
1739 | new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE); | |
1740 | ||
1741 | /* Recurse to possibly find the match. */ | |
1742 | ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type); | |
1743 | if (ret) | |
1744 | return ret; | |
1745 | ret = maybe_fold_offset_to_component_ref (field_type, new_base, t, | |
1746 | orig_type, false); | |
1747 | if (ret) | |
1748 | return ret; | |
4ee9c684 | 1749 | } |
1750 | ||
1751 | if (!tail_array_field) | |
1752 | return NULL_TREE; | |
1753 | ||
1754 | f = tail_array_field; | |
1755 | field_type = TREE_TYPE (f); | |
115073ff | 1756 | offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1); |
4ee9c684 | 1757 | |
4ee9c684 | 1758 | /* If we get here, we've got an aggregate field, and a possibly |
365db11e | 1759 | nonzero offset into it. Recurse and hope for a valid match. */ |
4ee9c684 | 1760 | if (base_is_ptr) |
1761 | base = build1 (INDIRECT_REF, record_type, base); | |
40b19772 | 1762 | base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); |
4ee9c684 | 1763 | |
1764 | t = maybe_fold_offset_to_array_ref (base, offset, orig_type); | |
1765 | if (t) | |
1766 | return t; | |
1767 | return maybe_fold_offset_to_component_ref (field_type, base, offset, | |
1768 | orig_type, false); | |
1769 | } | |
1770 | ||
3b45913d | 1771 | /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type |
1772 | or BASE[index], or by a combination of those. |
1773 | ||
1774 | Before attempting the conversion, strip off existing ADDR_EXPRs and |
1775 | handled component refs. */ | |
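/* Illustrative sketch (not in the original source): assuming 4-byte ints and
   no padding in

     struct s { int a; int x[4]; };

   the access *(int *)((char *)&v + 12) for a variable v of type struct s can
   be expressed as v.x[2]: the component-ref attempt finds field x (offset 4,
   leaving a displacement of 8 bytes), and the array-ref attempt then turns
   the remaining 8 bytes into index 2.  */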
1776 | ||
1777 | tree | |
1778 | maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type) | |
1779 | { | |
1780 | tree ret; | |
1781 | tree type; | |
1782 | bool base_is_ptr = true; | |
1783 | ||
1784 | STRIP_NOPS (base); | |
1785 | if (TREE_CODE (base) == ADDR_EXPR) | |
1786 | { | |
1787 | base_is_ptr = false; | |
1788 | ||
1789 | base = TREE_OPERAND (base, 0); | |
1790 | ||
1791 | /* Handle the case where an existing COMPONENT_REF picks, e.g., the wrong |
1792 | field of a union, so it needs to be removed and a new COMPONENT_REF |
1793 | constructed. Such wrong COMPONENT_REFs are often constructed by folding |
1794 | the (type *)&object within the expression (type *)&object+offset. */ |
1795 | if (handled_component_p (base) && 0) | |
1796 | { | |
1797 | HOST_WIDE_INT sub_offset, size, maxsize; | |
1798 | tree newbase; | |
1799 | newbase = get_ref_base_and_extent (base, &sub_offset, | |
1800 | &size, &maxsize); | |
1801 | gcc_assert (newbase); | |
1802 | gcc_assert (!(sub_offset & (BITS_PER_UNIT - 1))); | |
1803 | if (size == maxsize) | |
1804 | { | |
1805 | base = newbase; | |
1806 | if (sub_offset) | |
1807 | offset = int_const_binop (PLUS_EXPR, offset, | |
1808 | build_int_cst (TREE_TYPE (offset), | |
1809 | sub_offset / BITS_PER_UNIT), 1); | |
1810 | } | |
1811 | } | |
c8ca3ee7 | 1812 | if (useless_type_conversion_p (orig_type, TREE_TYPE (base)) |
3b45913d | 1813 | && integer_zerop (offset)) |
1814 | return base; | |
1815 | type = TREE_TYPE (base); | |
1816 | } | |
1817 | else | |
1818 | { | |
1819 | base_is_ptr = true; | |
1820 | if (!POINTER_TYPE_P (TREE_TYPE (base))) | |
1821 | return NULL_TREE; | |
1822 | type = TREE_TYPE (TREE_TYPE (base)); | |
1823 | } | |
1824 | ret = maybe_fold_offset_to_component_ref (type, base, offset, | |
1825 | orig_type, base_is_ptr); | |
1826 | if (!ret) | |
1827 | { | |
1828 | if (base_is_ptr) | |
1829 | base = build1 (INDIRECT_REF, type, base); | |
1830 | ret = maybe_fold_offset_to_array_ref (base, offset, orig_type); | |
1831 | } | |
1832 | return ret; | |
1833 | } | |
41511585 | 1834 | |
4ee9c684 | 1835 | /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET). |
1836 | Return the simplified expression, or NULL if nothing could be done. */ | |
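/* Illustrative sketch (not in the original source): one case handled below
   is an address that reduces to a string constant.  Assuming propagation has
   turned a dereference into

     *("abc" + 1)

   fold_read_from_constant_string yields the character constant 'b'.  Another
   case is the *(&B+O) form, which is handed off to
   maybe_fold_offset_to_reference.  */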
1837 | ||
1838 | static tree | |
1839 | maybe_fold_stmt_indirect (tree expr, tree base, tree offset) | |
1840 | { | |
1841 | tree t; | |
5acf8305 | 1842 | bool volatile_p = TREE_THIS_VOLATILE (expr); |
4ee9c684 | 1843 | |
1844 | /* We may well have constructed a double-nested PLUS_EXPR via multiple | |
1845 | substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that | |
1846 | are sometimes added. */ | |
1847 | base = fold (base); | |
40bcfc86 | 1848 | STRIP_TYPE_NOPS (base); |
4ee9c684 | 1849 | TREE_OPERAND (expr, 0) = base; |
1850 | ||
1851 | /* One possibility is that the address reduces to a string constant. */ | |
1852 | t = fold_read_from_constant_string (expr); | |
1853 | if (t) | |
1854 | return t; | |
1855 | ||
0de36bdb | 1856 | /* Add in any offset from a POINTER_PLUS_EXPR. */ |
1857 | if (TREE_CODE (base) == POINTER_PLUS_EXPR) | |
4ee9c684 | 1858 | { |
1859 | tree offset2; | |
1860 | ||
1861 | offset2 = TREE_OPERAND (base, 1); | |
1862 | if (TREE_CODE (offset2) != INTEGER_CST) | |
1863 | return NULL_TREE; | |
1864 | base = TREE_OPERAND (base, 0); | |
1865 | ||
0de36bdb | 1866 | offset = fold_convert (sizetype, |
1867 | int_const_binop (PLUS_EXPR, offset, offset2, 1)); | |
4ee9c684 | 1868 | } |
1869 | ||
1870 | if (TREE_CODE (base) == ADDR_EXPR) | |
1871 | { | |
3b45913d | 1872 | tree base_addr = base; |
1873 | ||
4ee9c684 | 1874 | /* Strip the ADDR_EXPR. */ |
1875 | base = TREE_OPERAND (base, 0); | |
1876 | ||
e67e5e1f | 1877 | /* Fold away CONST_DECL to its value, if the type is scalar. */ |
1878 | if (TREE_CODE (base) == CONST_DECL | |
d03bd588 | 1879 | && ccp_decl_initial_min_invariant (DECL_INITIAL (base))) |
e67e5e1f | 1880 | return DECL_INITIAL (base); |
1881 | ||
4ee9c684 | 1882 | /* Try folding *(&B+O) to B.X. */ |
3b45913d | 1883 | t = maybe_fold_offset_to_reference (base_addr, offset, |
1884 | TREE_TYPE (expr)); | |
4ee9c684 | 1885 | if (t) |
5acf8305 | 1886 | { |
1887 | TREE_THIS_VOLATILE (t) = volatile_p; | |
1888 | return t; | |
1889 | } | |
4ee9c684 | 1890 | } |
1891 | else | |
1892 | { | |
1893 | /* We can get here for out-of-range string constant accesses, | |
1894 | such as "_"[3]. Bail out of the entire substitution search | |
1895 | and arrange for the entire statement to be replaced by a | |
06b27565 | 1896 | call to __builtin_trap. In all likelihood this will all be |
4ee9c684 | 1897 | constant-folded away, but in the meantime we can't leave with |
1898 | something that get_expr_operands can't understand. */ | |
1899 | ||
1900 | t = base; | |
1901 | STRIP_NOPS (t); | |
1902 | if (TREE_CODE (t) == ADDR_EXPR | |
1903 | && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST) | |
1904 | { | |
1905 | /* FIXME: Except that this causes problems elsewhere with dead | |
1fa3a8f6 | 1906 | code not being deleted, and we die in the rtl expanders |
4ee9c684 | 1907 | because we failed to remove some ssa_name. In the meantime, |
1908 | just return zero. */ | |
1909 | /* FIXME2: This condition should be signaled by | |
1910 | fold_read_from_constant_string directly, rather than | |
1911 | re-checking for it here. */ | |
1912 | return integer_zero_node; | |
1913 | } | |
1914 | ||
1915 | /* Try folding *(B+O) to B->X. Still an improvement. */ | |
1916 | if (POINTER_TYPE_P (TREE_TYPE (base))) | |
1917 | { | |
3b45913d | 1918 | t = maybe_fold_offset_to_reference (base, offset, |
1919 | TREE_TYPE (expr)); | |
4ee9c684 | 1920 | if (t) |
1921 | return t; | |
1922 | } | |
1923 | } | |
1924 | ||
1925 | /* Otherwise we had an offset that we could not simplify. */ | |
1926 | return NULL_TREE; | |
1927 | } | |
1928 | ||
41511585 | 1929 | |
0de36bdb | 1930 | /* A subroutine of fold_stmt_r. EXPR is a POINTER_PLUS_EXPR. |
4ee9c684 | 1931 | |
1932 | A quaint feature extant in our address arithmetic is that there | |
1933 | can be hidden type changes here. The type of the result need | |
1934 | not be the same as the type of the input pointer. | |
1935 | ||
1936 | What we're after here is an expression of the form | |
1937 | (T *)(&array + const) | |
1938 | where the cast doesn't actually exist, but is implicit in the | |
0de36bdb | 1939 | type of the POINTER_PLUS_EXPR. We'd like to turn this into |
4ee9c684 | 1940 | &array[x] |
1941 | which may be able to propagate further. */ | |
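/* Illustrative sketch (not in the original source): assuming 4-byte ints and

     int a[10];

   a POINTER_PLUS_EXPR computing (int *)&a + 12 (a byte offset) can be
   rewritten below as &a[3], since 12 divides evenly by the element size.  */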
1942 | ||
1943 | static tree | |
1944 | maybe_fold_stmt_addition (tree expr) | |
1945 | { | |
1946 | tree op0 = TREE_OPERAND (expr, 0); | |
1947 | tree op1 = TREE_OPERAND (expr, 1); | |
1948 | tree ptr_type = TREE_TYPE (expr); | |
1949 | tree ptd_type; | |
1950 | tree t; | |
4ee9c684 | 1951 | |
0de36bdb | 1952 | gcc_assert (TREE_CODE (expr) == POINTER_PLUS_EXPR); |
1953 | ||
4ee9c684 | 1954 | /* It had better be a constant. */ |
1955 | if (TREE_CODE (op1) != INTEGER_CST) | |
1956 | return NULL_TREE; | |
1957 | /* The first operand should be an ADDR_EXPR. */ | |
1958 | if (TREE_CODE (op0) != ADDR_EXPR) | |
1959 | return NULL_TREE; | |
1960 | op0 = TREE_OPERAND (op0, 0); | |
1961 | ||
1962 | /* If the first operand is an ARRAY_REF, expand it so that we can fold | |
1963 | the offset into it. */ | |
1964 | while (TREE_CODE (op0) == ARRAY_REF) | |
1965 | { | |
1966 | tree array_obj = TREE_OPERAND (op0, 0); | |
1967 | tree array_idx = TREE_OPERAND (op0, 1); | |
1968 | tree elt_type = TREE_TYPE (op0); | |
1969 | tree elt_size = TYPE_SIZE_UNIT (elt_type); | |
1970 | tree min_idx; | |
1971 | ||
1972 | if (TREE_CODE (array_idx) != INTEGER_CST) | |
1973 | break; | |
1974 | if (TREE_CODE (elt_size) != INTEGER_CST) | |
1975 | break; | |
1976 | ||
1977 | /* Un-bias the index by the min index of the array type. */ | |
1978 | min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj)); | |
1979 | if (min_idx) | |
1980 | { | |
1981 | min_idx = TYPE_MIN_VALUE (min_idx); | |
1982 | if (min_idx) | |
1983 | { | |
6374121b | 1984 | if (TREE_CODE (min_idx) != INTEGER_CST) |
1985 | break; | |
1986 | ||
535664e3 | 1987 | array_idx = fold_convert (TREE_TYPE (min_idx), array_idx); |
4ee9c684 | 1988 | if (!integer_zerop (min_idx)) |
1989 | array_idx = int_const_binop (MINUS_EXPR, array_idx, | |
1990 | min_idx, 0); | |
1991 | } | |
1992 | } | |
1993 | ||
1994 | /* Convert the index to a byte offset. */ | |
535664e3 | 1995 | array_idx = fold_convert (sizetype, array_idx); |
4ee9c684 | 1996 | array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0); |
1997 | ||
1998 | /* Update the operands for the next round, or for folding. */ | |
0de36bdb | 1999 | op1 = int_const_binop (PLUS_EXPR, |
4ee9c684 | 2000 | array_idx, op1, 0); |
4ee9c684 | 2001 | op0 = array_obj; |
2002 | } | |
2003 | ||
4ee9c684 | 2004 | ptd_type = TREE_TYPE (ptr_type); |
2005 | ||
2006 | /* At which point we can try some of the same things as for indirects. */ | |
2007 | t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type); | |
2008 | if (!t) | |
2009 | t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1, | |
2010 | ptd_type, false); | |
2011 | if (t) | |
2012 | t = build1 (ADDR_EXPR, ptr_type, t); | |
2013 | ||
2014 | return t; | |
2015 | } | |
2016 | ||
0d759d9c | 2017 | /* For passing state through walk_tree into fold_stmt_r and its |
2018 | children. */ | |
2019 | ||
2020 | struct fold_stmt_r_data | |
2021 | { | |
add6ee5e | 2022 | tree stmt; |
2023 | bool *changed_p; | |
2024 | bool *inside_addr_expr_p; | |
0d759d9c | 2025 | }; |
2026 | ||
4ee9c684 | 2027 | /* Subroutine of fold_stmt called via walk_tree. We perform several |
2028 | simplifications of EXPR_P, mostly having to do with pointer arithmetic. */ | |
2029 | ||
2030 | static tree | |
2031 | fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data) | |
2032 | { | |
680a19b9 | 2033 | struct fold_stmt_r_data *fold_stmt_r_data = (struct fold_stmt_r_data *) data; |
0d759d9c | 2034 | bool *inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p; |
2035 | bool *changed_p = fold_stmt_r_data->changed_p; | |
4ee9c684 | 2036 | tree expr = *expr_p, t; |
2037 | ||
2038 | /* ??? It'd be nice if walk_tree had a pre-order option. */ | |
2039 | switch (TREE_CODE (expr)) | |
2040 | { | |
2041 | case INDIRECT_REF: | |
2042 | t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); | |
2043 | if (t) | |
2044 | return t; | |
2045 | *walk_subtrees = 0; | |
2046 | ||
2047 | t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0), | |
2048 | integer_zero_node); | |
2049 | break; | |
2050 | ||
3b45913d | 2051 | case NOP_EXPR: |
2052 | t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); | |
2053 | if (t) | |
2054 | return t; | |
2055 | *walk_subtrees = 0; | |
2056 | ||
2057 | if (POINTER_TYPE_P (TREE_TYPE (expr)) | |
2058 | && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0))) | |
2059 | && (t = maybe_fold_offset_to_reference | |
2060 | (TREE_OPERAND (expr, 0), | |
2061 | integer_zero_node, | |
2062 | TREE_TYPE (TREE_TYPE (expr))))) | |
f97743d3 | 2063 | { |
2064 | tree ptr_type = build_pointer_type (TREE_TYPE (t)); | |
2065 | if (!useless_type_conversion_p (TREE_TYPE (expr), ptr_type)) | |
2066 | return NULL_TREE; | |
2067 | t = build_fold_addr_expr_with_type (t, ptr_type); | |
2068 | } | |
3b45913d | 2069 | break; |
2070 | ||
0d759d9c | 2071 | /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF. |
4ee9c684 | 2072 | We'd only want to bother decomposing an existing ARRAY_REF if |
2073 | the base array is found to have another offset contained within. | |
2074 | Otherwise we'd be wasting time. */ | |
0d759d9c | 2075 | case ARRAY_REF: |
2076 | /* If we are not processing expressions found within an | |
2077 | ADDR_EXPR, then we can fold constant array references. */ | |
2078 | if (!*inside_addr_expr_p) | |
2079 | t = fold_read_from_constant_string (expr); | |
2080 | else | |
2081 | t = NULL; | |
2082 | break; | |
4ee9c684 | 2083 | |
2084 | case ADDR_EXPR: | |
0d759d9c | 2085 | *inside_addr_expr_p = true; |
4ee9c684 | 2086 | t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); |
0d759d9c | 2087 | *inside_addr_expr_p = false; |
4ee9c684 | 2088 | if (t) |
2089 | return t; | |
2090 | *walk_subtrees = 0; | |
2091 | ||
2092 | /* Set TREE_INVARIANT properly so that the value is considered |
2093 | constant and gets propagated as expected. */ |
2094 | if (*changed_p) | |
750ad201 | 2095 | recompute_tree_invariant_for_addr_expr (expr); |
4ee9c684 | 2096 | return NULL_TREE; |
2097 | ||
0de36bdb | 2098 | case POINTER_PLUS_EXPR: |
4ee9c684 | 2099 | t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); |
2100 | if (t) | |
2101 | return t; | |
2102 | t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL); | |
2103 | if (t) | |
2104 | return t; | |
2105 | *walk_subtrees = 0; | |
2106 | ||
2107 | t = maybe_fold_stmt_addition (expr); | |
2108 | break; | |
2109 | ||
2110 | case COMPONENT_REF: | |
2111 | t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); | |
2112 | if (t) | |
2113 | return t; | |
2114 | *walk_subtrees = 0; | |
2115 | ||
504d3463 | 2116 | /* Make sure the FIELD_DECL is actually a field in the type on the lhs. |
2117 | We've already checked that the records are compatible, so we should | |
2118 | come up with a set of compatible fields. */ | |
2119 | { | |
2120 | tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0)); | |
2121 | tree expr_field = TREE_OPERAND (expr, 1); | |
2122 | ||
2123 | if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record)) | |
2124 | { | |
2125 | expr_field = find_compatible_field (expr_record, expr_field); | |
2126 | TREE_OPERAND (expr, 1) = expr_field; | |
2127 | } | |
2128 | } | |
4ee9c684 | 2129 | break; |
2130 | ||
aed164c3 | 2131 | case TARGET_MEM_REF: |
2132 | t = maybe_fold_tmr (expr); | |
2133 | break; | |
2134 | ||
bb8a9715 | 2135 | case COND_EXPR: |
2136 | if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0))) | |
2137 | { | |
2138 | tree op0 = TREE_OPERAND (expr, 0); | |
add6ee5e | 2139 | tree tem; |
2140 | bool set; | |
2141 | ||
2142 | fold_defer_overflow_warnings (); | |
2143 | tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0), | |
2144 | TREE_OPERAND (op0, 0), | |
2145 | TREE_OPERAND (op0, 1)); | |
2146 | set = tem && set_rhs (expr_p, tem); | |
2147 | fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0); | |
2148 | if (set) | |
f2532264 | 2149 | { |
2150 | t = *expr_p; | |
2151 | break; | |
2152 | } | |
bb8a9715 | 2153 | } |
f2532264 | 2154 | return NULL_TREE; |
bb8a9715 | 2155 | |
4ee9c684 | 2156 | default: |
2157 | return NULL_TREE; | |
2158 | } | |
2159 | ||
2160 | if (t) | |
2161 | { | |
2162 | *expr_p = t; | |
2163 | *changed_p = true; | |
2164 | } | |
2165 | ||
2166 | return NULL_TREE; | |
2167 | } | |
2168 | ||
4ee9c684 | 2169 | |
0a39fd54 | 2170 | /* Return in LENGTH the string length, the maximum string length, or |
2171 | the maximum value of ARG. |
2172 | If ARG is an SSA name variable, follow its use-def chains. Return |
2173 | false if we are unable to determine the length or value, or if, for |
2174 | TYPE == 0, LENGTH is not NULL and its value is not equal to the |
2175 | length we determine. VISITED is a bitmap of visited variables. |
2176 | TYPE is 0 if the string length should be returned, 1 for the maximum |
2177 | string length, and 2 for the maximum value ARG can have. */ |
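/* Illustrative sketch (not in the original source): if ARG is a PHI merging
   pointers to the two constant strings "hi" and "world", the TYPE == 1
   (maximum string length) query succeeds with LENGTH == 5, whereas a
   TYPE == 0 (exact length) query fails because the two lengths 2 and 5
   differ.  */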
4ee9c684 | 2178 | |
72648a0e | 2179 | static bool |
0a39fd54 | 2180 | get_maxval_strlen (tree arg, tree *length, bitmap visited, int type) |
4ee9c684 | 2181 | { |
41511585 | 2182 | tree var, def_stmt, val; |
2183 | ||
2184 | if (TREE_CODE (arg) != SSA_NAME) | |
72648a0e | 2185 | { |
ec0fa513 | 2186 | if (TREE_CODE (arg) == COND_EXPR) |
2187 | return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type) | |
2188 | && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type); | |
2189 | ||
0a39fd54 | 2190 | if (type == 2) |
2191 | { | |
2192 | val = arg; | |
2193 | if (TREE_CODE (val) != INTEGER_CST | |
2194 | || tree_int_cst_sgn (val) < 0) | |
2195 | return false; | |
2196 | } | |
2197 | else | |
2198 | val = c_strlen (arg, 1); | |
41511585 | 2199 | if (!val) |
72648a0e | 2200 | return false; |
e37235f0 | 2201 | |
0a39fd54 | 2202 | if (*length) |
2203 | { | |
2204 | if (type > 0) | |
2205 | { | |
2206 | if (TREE_CODE (*length) != INTEGER_CST | |
2207 | || TREE_CODE (val) != INTEGER_CST) | |
2208 | return false; | |
2209 | ||
2210 | if (tree_int_cst_lt (*length, val)) | |
2211 | *length = val; | |
2212 | return true; | |
2213 | } | |
2214 | else if (simple_cst_equal (val, *length) != 1) | |
2215 | return false; | |
2216 | } | |
4ee9c684 | 2217 | |
41511585 | 2218 | *length = val; |
2219 | return true; | |
4ee9c684 | 2220 | } |
72648a0e | 2221 | |
41511585 | 2222 | /* If we were already here, break the infinite cycle. */ |
2223 | if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg))) | |
2224 | return true; | |
2225 | bitmap_set_bit (visited, SSA_NAME_VERSION (arg)); | |
2226 | ||
2227 | var = arg; | |
2228 | def_stmt = SSA_NAME_DEF_STMT (var); | |
4ee9c684 | 2229 | |
41511585 | 2230 | switch (TREE_CODE (def_stmt)) |
2231 | { | |
35cc02b5 | 2232 | case GIMPLE_MODIFY_STMT: |
41511585 | 2233 | { |
0a39fd54 | 2234 | tree rhs; |
2235 | ||
41511585 | 2236 | /* The RHS of the statement defining VAR must either have a |
2237 | constant length or come from another SSA_NAME with a constant | |
2238 | length. */ | |
35cc02b5 | 2239 | rhs = GIMPLE_STMT_OPERAND (def_stmt, 1); |
41511585 | 2240 | STRIP_NOPS (rhs); |
0a39fd54 | 2241 | return get_maxval_strlen (rhs, length, visited, type); |
41511585 | 2242 | } |
4ee9c684 | 2243 | |
41511585 | 2244 | case PHI_NODE: |
2245 | { | |
2246 | /* All the arguments of the PHI node must have the same constant | |
2247 | length. */ | |
2248 | int i; | |
4ee9c684 | 2249 | |
41511585 | 2250 | for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++) |
2251 | { | |
2252 | tree arg = PHI_ARG_DEF (def_stmt, i); | |
4ee9c684 | 2253 | |
41511585 | 2254 | /* If this PHI has itself as an argument, we cannot |
2255 | determine the string length of this argument. However, | |
2256 | if we can find a constant string length for the other | |
2257 | PHI args then we can still be sure that this is a | |
2258 | constant string length. So be optimistic and just | |
2259 | continue with the next argument. */ | |
2260 | if (arg == PHI_RESULT (def_stmt)) | |
2261 | continue; | |
4ee9c684 | 2262 | |
0a39fd54 | 2263 | if (!get_maxval_strlen (arg, length, visited, type)) |
41511585 | 2264 | return false; |
2265 | } | |
4ee9c684 | 2266 | |
41511585 | 2267 | return true; |
5664499b | 2268 | } |
4ee9c684 | 2269 | |
41511585 | 2270 | default: |
2271 | break; | |
4ee9c684 | 2272 | } |
2273 | ||
41511585 | 2274 | |
2275 | return false; | |
4ee9c684 | 2276 | } |
2277 | ||
2278 | ||
2279 | /* Fold builtin call FN in statement STMT. If it cannot be folded into a | |
2280 | constant, return NULL_TREE. Otherwise, return its constant value. */ | |
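/* Illustrative sketch (not in the original source): for a call

     n = strlen (p);

   where CCP has determined that p always points to the constant string
   "abc", the BUILT_IN_STRLEN case below uses the length computed by
   get_maxval_strlen to replace the call with the constant 3.  */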
2281 | ||
2282 | static tree | |
2283 | ccp_fold_builtin (tree stmt, tree fn) | |
2284 | { | |
0a39fd54 | 2285 | tree result, val[3]; |
c2f47e15 | 2286 | tree callee, a; |
0a39fd54 | 2287 | int arg_mask, i, type; |
f0613857 | 2288 | bitmap visited; |
2289 | bool ignore; | |
c2f47e15 | 2290 | call_expr_arg_iterator iter; |
2291 | int nargs; | |
4ee9c684 | 2292 | |
35cc02b5 | 2293 | ignore = TREE_CODE (stmt) != GIMPLE_MODIFY_STMT; |
4ee9c684 | 2294 | |
2295 | /* First try the generic builtin folder. If that succeeds, return the | |
2296 | result directly. */ | |
c2f47e15 | 2297 | result = fold_call_expr (fn, ignore); |
4ee9c684 | 2298 | if (result) |
0a39fd54 | 2299 | { |
2300 | if (ignore) | |
2301 | STRIP_NOPS (result); | |
2302 | return result; | |
2303 | } | |
f0613857 | 2304 | |
2305 | /* Ignore MD builtins. */ | |
c2f47e15 | 2306 | callee = get_callee_fndecl (fn); |
f0613857 | 2307 | if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD) |
2308 | return NULL_TREE; | |
4ee9c684 | 2309 | |
2310 | /* If the builtin could not be folded, and it has no argument list, | |
2311 | we're done. */ | |
c2f47e15 | 2312 | nargs = call_expr_nargs (fn); |
2313 | if (nargs == 0) | |
4ee9c684 | 2314 | return NULL_TREE; |
2315 | ||
2316 | /* Limit the work only for builtins we know how to simplify. */ | |
2317 | switch (DECL_FUNCTION_CODE (callee)) | |
2318 | { | |
2319 | case BUILT_IN_STRLEN: | |
2320 | case BUILT_IN_FPUTS: | |
2321 | case BUILT_IN_FPUTS_UNLOCKED: | |
0a39fd54 | 2322 | arg_mask = 1; |
2323 | type = 0; | |
4ee9c684 | 2324 | break; |
2325 | case BUILT_IN_STRCPY: | |
2326 | case BUILT_IN_STRNCPY: | |
0a39fd54 | 2327 | arg_mask = 2; |
2328 | type = 0; | |
2329 | break; | |
2330 | case BUILT_IN_MEMCPY_CHK: | |
2331 | case BUILT_IN_MEMPCPY_CHK: | |
2332 | case BUILT_IN_MEMMOVE_CHK: | |
2333 | case BUILT_IN_MEMSET_CHK: | |
2334 | case BUILT_IN_STRNCPY_CHK: | |
2335 | arg_mask = 4; | |
2336 | type = 2; | |
2337 | break; | |
2338 | case BUILT_IN_STRCPY_CHK: | |
2339 | case BUILT_IN_STPCPY_CHK: | |
2340 | arg_mask = 2; | |
2341 | type = 1; | |
2342 | break; | |
2343 | case BUILT_IN_SNPRINTF_CHK: | |
2344 | case BUILT_IN_VSNPRINTF_CHK: | |
2345 | arg_mask = 2; | |
2346 | type = 2; | |
4ee9c684 | 2347 | break; |
2348 | default: | |
2349 | return NULL_TREE; | |
2350 | } | |
2351 | ||
2352 | /* Try to use the dataflow information gathered by the CCP process. */ | |
27335ffd | 2353 | visited = BITMAP_ALLOC (NULL); |
4ee9c684 | 2354 | |
0a39fd54 | 2355 | memset (val, 0, sizeof (val)); |
c2f47e15 | 2356 | init_call_expr_arg_iterator (fn, &iter); |
2357 | for (i = 0; arg_mask; i++, arg_mask >>= 1) | |
2358 | { | |
2359 | a = next_call_expr_arg (&iter); | |
2360 | if (arg_mask & 1) | |
2361 | { | |
2362 | bitmap_clear (visited); | |
2363 | if (!get_maxval_strlen (a, &val[i], visited, type)) | |
2364 | val[i] = NULL_TREE; | |
2365 | } | |
2366 | } | |
4ee9c684 | 2367 | |
27335ffd | 2368 | BITMAP_FREE (visited); |
4ee9c684 | 2369 | |
f0613857 | 2370 | result = NULL_TREE; |
4ee9c684 | 2371 | switch (DECL_FUNCTION_CODE (callee)) |
2372 | { | |
2373 | case BUILT_IN_STRLEN: | |
0a39fd54 | 2374 | if (val[0]) |
4ee9c684 | 2375 | { |
f0d6e81c | 2376 | tree new_val = fold_convert (TREE_TYPE (fn), val[0]); |
4ee9c684 | 2377 | |
2378 | /* If the result is not a valid gimple value, or not a cast | |
2379 | of a valid gimple value, then we cannot use the result. */ |
f0d6e81c | 2380 | if (is_gimple_val (new_val) |
2381 | || (is_gimple_cast (new_val) | |
2382 | && is_gimple_val (TREE_OPERAND (new_val, 0)))) | |
2383 | return new_val; | |
4ee9c684 | 2384 | } |
f0613857 | 2385 | break; |
2386 | ||
4ee9c684 | 2387 | case BUILT_IN_STRCPY: |
c2f47e15 | 2388 | if (val[1] && is_gimple_val (val[1]) && nargs == 2) |
2389 | result = fold_builtin_strcpy (callee, | |
2390 | CALL_EXPR_ARG (fn, 0), | |
2391 | CALL_EXPR_ARG (fn, 1), | |
2392 | val[1]); | |
f0613857 | 2393 | break; |
2394 | ||
4ee9c684 | 2395 | case BUILT_IN_STRNCPY: |
c2f47e15 | 2396 | if (val[1] && is_gimple_val (val[1]) && nargs == 3) |
2397 | result = fold_builtin_strncpy (callee, | |
2398 | CALL_EXPR_ARG (fn, 0), | |
2399 | CALL_EXPR_ARG (fn, 1), | |
2400 | CALL_EXPR_ARG (fn, 2), | |
2401 | val[1]); | |
f0613857 | 2402 | break; |
2403 | ||
4ee9c684 | 2404 | case BUILT_IN_FPUTS: |
c2f47e15 | 2405 | result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0), |
2406 | CALL_EXPR_ARG (fn, 1), | |
35cc02b5 | 2407 | TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 0, |
0a39fd54 | 2408 | val[0]); |
f0613857 | 2409 | break; |
2410 | ||
4ee9c684 | 2411 | case BUILT_IN_FPUTS_UNLOCKED: |
c2f47e15 | 2412 | result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0), |
2413 | CALL_EXPR_ARG (fn, 1), | |
35cc02b5 | 2414 | TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 1, |
0a39fd54 | 2415 | val[0]); |
2416 | break; | |
2417 | ||
2418 | case BUILT_IN_MEMCPY_CHK: | |
2419 | case BUILT_IN_MEMPCPY_CHK: | |
2420 | case BUILT_IN_MEMMOVE_CHK: | |
2421 | case BUILT_IN_MEMSET_CHK: | |
2422 | if (val[2] && is_gimple_val (val[2])) | |
c2f47e15 | 2423 | result = fold_builtin_memory_chk (callee, |
2424 | CALL_EXPR_ARG (fn, 0), | |
2425 | CALL_EXPR_ARG (fn, 1), | |
2426 | CALL_EXPR_ARG (fn, 2), | |
2427 | CALL_EXPR_ARG (fn, 3), | |
2428 | val[2], ignore, | |
0a39fd54 | 2429 | DECL_FUNCTION_CODE (callee)); |
2430 | break; | |
2431 | ||
2432 | case BUILT_IN_STRCPY_CHK: | |
2433 | case BUILT_IN_STPCPY_CHK: | |
2434 | if (val[1] && is_gimple_val (val[1])) | |
c2f47e15 | 2435 | result = fold_builtin_stxcpy_chk (callee, |
2436 | CALL_EXPR_ARG (fn, 0), | |
2437 | CALL_EXPR_ARG (fn, 1), | |
2438 | CALL_EXPR_ARG (fn, 2), | |
2439 | val[1], ignore, | |
0a39fd54 | 2440 | DECL_FUNCTION_CODE (callee)); |
2441 | break; | |
2442 | ||
2443 | case BUILT_IN_STRNCPY_CHK: | |
2444 | if (val[2] && is_gimple_val (val[2])) | |
c2f47e15 | 2445 | result = fold_builtin_strncpy_chk (CALL_EXPR_ARG (fn, 0), |
2446 | CALL_EXPR_ARG (fn, 1), | |
2447 | CALL_EXPR_ARG (fn, 2), | |
2448 | CALL_EXPR_ARG (fn, 3), | |
2449 | val[2]); | |
0a39fd54 | 2450 | break; |
2451 | ||
2452 | case BUILT_IN_SNPRINTF_CHK: | |
2453 | case BUILT_IN_VSNPRINTF_CHK: | |
2454 | if (val[1] && is_gimple_val (val[1])) | |
c2f47e15 | 2455 | result = fold_builtin_snprintf_chk (fn, val[1], |
0a39fd54 | 2456 | DECL_FUNCTION_CODE (callee)); |
f0613857 | 2457 | break; |
4ee9c684 | 2458 | |
2459 | default: | |
8c0963c4 | 2460 | gcc_unreachable (); |
4ee9c684 | 2461 | } |
2462 | ||
f0613857 | 2463 | if (result && ignore) |
db97ad41 | 2464 | result = fold_ignored_result (result); |
f0613857 | 2465 | return result; |
4ee9c684 | 2466 | } |
2467 | ||
2468 | ||
5206b159 | 2469 | /* Fold the statement pointed to by STMT_P. In some cases, this function may |
41511585 | 2470 | replace the whole statement with a new one. Returns true iff folding |
2471 | makes any changes. */ | |
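/* Illustrative sketch (not in the original source): after CCP has
   substituted &a for a pointer p, a statement such as

     x = *p;        becomes        x = *&a;

   and the walk over the statement below folds the *&a back to plain a.
   Builtin calls on the right-hand side get a further chance via
   ccp_fold_builtin.  */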
4ee9c684 | 2472 | |
41511585 | 2473 | bool |
2474 | fold_stmt (tree *stmt_p) | |
4ee9c684 | 2475 | { |
41511585 | 2476 | tree rhs, result, stmt; |
0d759d9c | 2477 | struct fold_stmt_r_data fold_stmt_r_data; |
41511585 | 2478 | bool changed = false; |
0d759d9c | 2479 | bool inside_addr_expr = false; |
2480 | ||
add6ee5e | 2481 | stmt = *stmt_p; |
2482 | ||
2483 | fold_stmt_r_data.stmt = stmt; | |
0d759d9c | 2484 | fold_stmt_r_data.changed_p = &changed; |
2485 | fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr; | |
4ee9c684 | 2486 | |
41511585 | 2487 | /* If we replaced constants and the statement makes pointer dereferences, |
2488 | then we may need to fold instances of *&VAR into VAR, etc. */ | |
0d759d9c | 2489 | if (walk_tree (stmt_p, fold_stmt_r, &fold_stmt_r_data, NULL)) |
41511585 | 2490 | { |
c2f47e15 | 2491 | *stmt_p = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0); |
4ee9c684 | 2492 | return true; |
2493 | } | |
2494 | ||
41511585 | 2495 | rhs = get_rhs (stmt); |
2496 | if (!rhs) | |
2497 | return changed; | |
2498 | result = NULL_TREE; | |
4ee9c684 | 2499 | |
41511585 | 2500 | if (TREE_CODE (rhs) == CALL_EXPR) |
4ee9c684 | 2501 | { |
41511585 | 2502 | tree callee; |
4ee9c684 | 2503 | |
41511585 | 2504 | /* Check for builtins that CCP can handle using information not |
2505 | available in the generic fold routines. */ | |
2506 | callee = get_callee_fndecl (rhs); | |
2507 | if (callee && DECL_BUILT_IN (callee)) | |
2508 | result = ccp_fold_builtin (stmt, rhs); | |
e77b8618 | 2509 | else |
2510 | { | |
2511 | /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve | |
2512 | here are when we've propagated the address of a decl into the | |
2513 | object slot. */ | |
2514 | /* ??? Should perhaps do this in fold proper. However, doing it | |
2515 | there requires that we create a new CALL_EXPR, and that requires | |
2516 | copying EH region info to the new node. Easier to just do it | |
2517 | here where we can just smash the call operand. Also | |
2518 | CALL_EXPR_RETURN_SLOT_OPT needs to be handled correctly and | |
c2f47e15 | 2519 | copied; fold_call_expr does not have that information. */ |
2520 | callee = CALL_EXPR_FN (rhs); | |
e77b8618 | 2521 | if (TREE_CODE (callee) == OBJ_TYPE_REF |
2522 | && lang_hooks.fold_obj_type_ref | |
2523 | && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR | |
2524 | && DECL_P (TREE_OPERAND | |
2525 | (OBJ_TYPE_REF_OBJECT (callee), 0))) | |
2526 | { | |
2527 | tree t; | |
2528 | ||
2529 | /* ??? Caution: Broken ADDR_EXPR semantics means that | |
2530 | looking at the type of the operand of the addr_expr | |
2531 | can yield an array type. See silly exception in | |
2532 | check_pointer_types_r. */ | |
2533 | ||
2534 | t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee))); | |
2535 | t = lang_hooks.fold_obj_type_ref (callee, t); | |
2536 | if (t) | |
2537 | { | |
c2f47e15 | 2538 | CALL_EXPR_FN (rhs) = t; |
e77b8618 | 2539 | changed = true; |
2540 | } | |
2541 | } | |
2542 | } | |
4ee9c684 | 2543 | } |
ec0fa513 | 2544 | else if (TREE_CODE (rhs) == COND_EXPR) |
2545 | { | |
2546 | tree temp = fold (COND_EXPR_COND (rhs)); | |
2547 | if (temp != COND_EXPR_COND (rhs)) | |
2548 | result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp, | |
2549 | COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs)); | |
2550 | } | |
4ee9c684 | 2551 | |
41511585 | 2552 | /* If we couldn't fold the RHS, hand over to the generic fold routines. */ |
2553 | if (result == NULL_TREE) | |
2554 | result = fold (rhs); | |
4ee9c684 | 2555 | |
41511585 | 2556 | /* Strip away useless type conversions: both the NON_LVALUE_EXPR that |
2557 | may have been added by fold, and "useless" type conversions that might |
2558 | now be apparent due to propagation. */ |
2559 | STRIP_USELESS_TYPE_CONVERSION (result); | |
2560 | ||
2561 | if (result != rhs) | |
2562 | changed |= set_rhs (stmt_p, result); | |
2563 | ||
2564 | return changed; | |
4ee9c684 | 2565 | } |
2566 | ||
8171a1dd | 2567 | /* Perform the minimal folding on statement STMT. Only operations like |
2568 | *&x created by constant propagation are handled. The statement cannot | |
2569 | be replaced with a new one. */ | |
2570 | ||
2571 | bool | |
2572 | fold_stmt_inplace (tree stmt) | |
2573 | { | |
2574 | tree old_stmt = stmt, rhs, new_rhs; | |
0d759d9c | 2575 | struct fold_stmt_r_data fold_stmt_r_data; |
8171a1dd | 2576 | bool changed = false; |
0d759d9c | 2577 | bool inside_addr_expr = false; |
2578 | ||
add6ee5e | 2579 | fold_stmt_r_data.stmt = stmt; |
0d759d9c | 2580 | fold_stmt_r_data.changed_p = &changed; |
2581 | fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr; | |
8171a1dd | 2582 | |
0d759d9c | 2583 | walk_tree (&stmt, fold_stmt_r, &fold_stmt_r_data, NULL); |
8171a1dd | 2584 | gcc_assert (stmt == old_stmt); |
2585 | ||
2586 | rhs = get_rhs (stmt); | |
2587 | if (!rhs || rhs == stmt) | |
2588 | return changed; | |
2589 | ||
2590 | new_rhs = fold (rhs); | |
ff09445d | 2591 | STRIP_USELESS_TYPE_CONVERSION (new_rhs); |
8171a1dd | 2592 | if (new_rhs == rhs) |
2593 | return changed; | |
2594 | ||
2595 | changed |= set_rhs (&stmt, new_rhs); | |
2596 | gcc_assert (stmt == old_stmt); | |
2597 | ||
2598 | return changed; | |
2599 | } | |
4ee9c684 | 2600 | \f |
909e5ecb | 2601 | /* Convert EXPR into a GIMPLE value suitable for substitution on the |
2602 | RHS of an assignment. Insert the necessary statements before | |
a280136a | 2603 | iterator *SI_P. |
2604 | When IGNORE is set, don't worry about the return value. */ | |
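/* Illustrative sketch (not in the original source): when ccp_fold_builtin
   rewrites, say, fputs ("abc", f) into the equivalent fwrite call, the
   folded form is a full CALL_EXPR rather than a GIMPLE value, so it cannot
   be dropped directly into the RHS of the old statement.  The helper below
   re-gimplifies it, inserts the resulting statements before *SI_P, and hands
   back a temporary holding the value; when IGNORE is set the value is
   discarded.  */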
909e5ecb | 2605 | |
2606 | static tree | |
a280136a | 2607 | convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr, bool ignore) |
909e5ecb | 2608 | { |
2609 | tree_stmt_iterator ti; | |
2610 | tree stmt = bsi_stmt (*si_p); | |
2611 | tree tmp, stmts = NULL; | |
2612 | ||
2613 | push_gimplify_context (); | |
a280136a | 2614 | if (ignore) |
2615 | { | |
2616 | tmp = build_empty_stmt (); | |
2617 | gimplify_and_add (expr, &stmts); | |
2618 | } | |
2619 | else | |
2620 | tmp = get_initialized_tmp_var (expr, &stmts, NULL); | |
909e5ecb | 2621 | pop_gimplify_context (NULL); |
2622 | ||
b66731e8 | 2623 | if (EXPR_HAS_LOCATION (stmt)) |
2624 | annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt)); | |
2625 | ||
909e5ecb | 2626 | /* The replacement can expose previously unreferenced variables. */ |
2627 | for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti)) | |
2628 | { | |
b66731e8 | 2629 | tree new_stmt = tsi_stmt (ti); |
909e5ecb | 2630 | find_new_referenced_vars (tsi_stmt_ptr (ti)); |
b66731e8 | 2631 | bsi_insert_before (si_p, new_stmt, BSI_NEW_STMT); |
de6ed584 | 2632 | mark_symbols_for_renaming (new_stmt); |
b66731e8 | 2633 | bsi_next (si_p); |
909e5ecb | 2634 | } |
2635 | ||
909e5ecb | 2636 | return tmp; |
2637 | } | |
2638 | ||
2639 | ||
4ee9c684 | 2640 | /* A simple pass that attempts to fold all builtin functions. This pass |
2641 | is run after we've propagated as many constants as we can. */ | |
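/* Illustrative sketch (not in the original source): a leftover

     flag = __builtin_constant_p (n);

   that has not been folded to 1 by this point is resolved to 0 below, and a
   call such as strlen (q) whose argument has meanwhile become a known
   constant string is folded just as in the CCP pass proper.  */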
2642 | ||
2a1990e9 | 2643 | static unsigned int |
4ee9c684 | 2644 | execute_fold_all_builtins (void) |
2645 | { | |
b36237eb | 2646 | bool cfg_changed = false; |
4ee9c684 | 2647 | basic_block bb; |
b1b7c0c4 | 2648 | unsigned int todoflags = 0; |
2649 | ||
4ee9c684 | 2650 | FOR_EACH_BB (bb) |
2651 | { | |
2652 | block_stmt_iterator i; | |
0a39fd54 | 2653 | for (i = bsi_start (bb); !bsi_end_p (i); ) |
4ee9c684 | 2654 | { |
2655 | tree *stmtp = bsi_stmt_ptr (i); | |
4c27dd45 | 2656 | tree old_stmt = *stmtp; |
4ee9c684 | 2657 | tree call = get_rhs (*stmtp); |
2658 | tree callee, result; | |
0a39fd54 | 2659 | enum built_in_function fcode; |
4ee9c684 | 2660 | |
2661 | if (!call || TREE_CODE (call) != CALL_EXPR) | |
0a39fd54 | 2662 | { |
2663 | bsi_next (&i); | |
2664 | continue; | |
2665 | } | |
4ee9c684 | 2666 | callee = get_callee_fndecl (call); |
2667 | if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL) | |
0a39fd54 | 2668 | { |
2669 | bsi_next (&i); | |
2670 | continue; | |
2671 | } | |
2672 | fcode = DECL_FUNCTION_CODE (callee); | |
4ee9c684 | 2673 | |
2674 | result = ccp_fold_builtin (*stmtp, call); | |
2675 | if (!result) | |
2676 | switch (DECL_FUNCTION_CODE (callee)) | |
2677 | { | |
2678 | case BUILT_IN_CONSTANT_P: | |
2679 | /* Resolve __builtin_constant_p. If it hasn't been | |
2680 | folded to integer_one_node by now, it's fairly | |
2681 | certain that the value simply isn't constant. */ | |
2682 | result = integer_zero_node; | |
2683 | break; | |
2684 | ||
2685 | default: | |
0a39fd54 | 2686 | bsi_next (&i); |
4ee9c684 | 2687 | continue; |
2688 | } | |
2689 | ||
2690 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2691 | { | |
2692 | fprintf (dump_file, "Simplified\n "); | |
2693 | print_generic_stmt (dump_file, *stmtp, dump_flags); | |
2694 | } | |
2695 | ||
de6ed584 | 2696 | push_stmt_changes (stmtp); |
2697 | ||
909e5ecb | 2698 | if (!set_rhs (stmtp, result)) |
2699 | { | |
a280136a | 2700 | result = convert_to_gimple_builtin (&i, result, |
2701 | TREE_CODE (old_stmt) | |
35cc02b5 | 2702 | != GIMPLE_MODIFY_STMT); |
876760f6 | 2703 | if (result) |
2704 | { | |
2705 | bool ok = set_rhs (stmtp, result); | |
876760f6 | 2706 | gcc_assert (ok); |
b1b7c0c4 | 2707 | todoflags |= TODO_rebuild_alias; |
876760f6 | 2708 | } |
909e5ecb | 2709 | } |
de6ed584 | 2710 | |
2711 | pop_stmt_changes (stmtp); | |
2712 | ||
4c27dd45 | 2713 | if (maybe_clean_or_replace_eh_stmt (old_stmt, *stmtp) |
b36237eb | 2714 | && tree_purge_dead_eh_edges (bb)) |
2715 | cfg_changed = true; | |
4ee9c684 | 2716 | |
2717 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2718 | { | |
2719 | fprintf (dump_file, "to\n "); | |
2720 | print_generic_stmt (dump_file, *stmtp, dump_flags); | |
2721 | fprintf (dump_file, "\n"); | |
2722 | } | |
0a39fd54 | 2723 | |
2724 | /* Retry the same statement if it changed into another | |
2725 | builtin; there might be new opportunities now. */ |
2726 | call = get_rhs (*stmtp); | |
2727 | if (!call || TREE_CODE (call) != CALL_EXPR) | |
2728 | { | |
2729 | bsi_next (&i); | |
2730 | continue; | |
2731 | } | |
2732 | callee = get_callee_fndecl (call); | |
2733 | if (!callee | |
2734 | || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL | |
2735 | || DECL_FUNCTION_CODE (callee) == fcode) | |
2736 | bsi_next (&i); | |
4ee9c684 | 2737 | } |
2738 | } | |
b1b7c0c4 | 2739 | |
b36237eb | 2740 | /* Delete unreachable blocks. */ |
b1b7c0c4 | 2741 | if (cfg_changed) |
2742 | todoflags |= TODO_cleanup_cfg; | |
2743 | ||
2744 | return todoflags; | |
4ee9c684 | 2745 | } |
2746 | ||
41511585 | 2747 | |
4ee9c684 | 2748 | struct tree_opt_pass pass_fold_builtins = |
2749 | { | |
2750 | "fab", /* name */ | |
2751 | NULL, /* gate */ | |
2752 | execute_fold_all_builtins, /* execute */ | |
2753 | NULL, /* sub */ | |
2754 | NULL, /* next */ | |
2755 | 0, /* static_pass_number */ | |
2756 | 0, /* tv_id */ | |
49290934 | 2757 | PROP_cfg | PROP_ssa, /* properties_required */ |
4ee9c684 | 2758 | 0, /* properties_provided */ |
2759 | 0, /* properties_destroyed */ | |
2760 | 0, /* todo_flags_start */ | |
909e5ecb | 2761 | TODO_dump_func |
2762 | | TODO_verify_ssa | |
88dbf20f | 2763 | | TODO_update_ssa, /* todo_flags_finish */ |
0f9005dd | 2764 | 0 /* letter */ |
4ee9c684 | 2765 | }; |