Commit | Line | Data |
---|---|---|
6de9cd9a | 1 | /* Conditional constant propagation pass for the GNU compiler. |
1cdaa211 RG |
2 | Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, |
3 | 2010 Free Software Foundation, Inc. | |
6de9cd9a DN |
4 | Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org> |
5 | Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com> | |
6 | ||
7 | This file is part of GCC. | |
b8698a0f | 8 | |
6de9cd9a DN |
9 | GCC is free software; you can redistribute it and/or modify it |
10 | under the terms of the GNU General Public License as published by the | |
9dcd6f09 | 11 | Free Software Foundation; either version 3, or (at your option) any |
6de9cd9a | 12 | later version. |
b8698a0f | 13 | |
6de9cd9a DN |
14 | GCC is distributed in the hope that it will be useful, but WITHOUT |
15 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
16 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
17 | for more details. | |
b8698a0f | 18 | |
6de9cd9a | 19 | You should have received a copy of the GNU General Public License |
9dcd6f09 NC |
20 | along with GCC; see the file COPYING3. If not see |
21 | <http://www.gnu.org/licenses/>. */ | |
6de9cd9a | 22 | |
0bca51f0 DN |
23 | /* Conditional constant propagation (CCP) is based on the SSA |
24 | propagation engine (tree-ssa-propagate.c). Constant assignments of | |
25 | the form VAR = CST are propagated from the assignments into uses of | |
26 | VAR, which in turn may generate new constants. The simulation uses | |
27 | a four level lattice to keep track of constant values associated | |
28 | with SSA names. Given an SSA name V_i, it may take one of the | |
29 | following values: | |
30 | ||
106dec71 ZD |
31 | UNINITIALIZED -> the initial state of the value. This value |
32 | is replaced with a correct initial value | |
33 | the first time the value is used, so the | |
34 | rest of the pass does not need to care about | |
35 | it. Using this value simplifies initialization | |
36 | of the pass, and prevents us from needlessly | |
37 | scanning statements that are never reached. | |
0bca51f0 DN |
38 | |
39 | UNDEFINED -> V_i is a local variable whose definition | |
40 | has not been processed yet. Therefore we | |
41 | don't yet know if its value is a constant | |
42 | or not. | |
43 | ||
44 | CONSTANT -> V_i has been found to hold a constant | |
45 | value C. | |
46 | ||
47 | VARYING -> V_i cannot take a constant value, or if it | |
48 | does, it is not possible to determine it | |
49 | at compile time. | |
50 | ||
51 | The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node: | |
52 | ||
53 | 1- In ccp_visit_stmt, we are interested in assignments whose RHS | |
54 | evaluates into a constant and conditional jumps whose predicate | |
55 | evaluates into a boolean true or false. When an assignment of | |
56 | the form V_i = CONST is found, V_i's lattice value is set to | |
57 | CONSTANT and CONST is associated with it. This causes the | |
58 | propagation engine to add all the SSA edges coming out of the |
59 | assignment into the worklists, so that statements that use V_i | |
60 | can be visited. | |
61 | ||
62 | If the statement is a conditional with a constant predicate, we | |
63 | mark the outgoing edges as executable or not executable | |
64 | depending on the predicate's value. This is then used when | |
65 | visiting PHI nodes to know when a PHI argument can be ignored. | |
b8698a0f | 66 | |
0bca51f0 DN |
67 | |
68 | 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the | |
69 | same constant C, then the LHS of the PHI is set to C. This | |
70 | evaluation is known as the "meet operation". Since one of the | |
71 | goals of this evaluation is to optimistically return constant | |
72 | values as often as possible, it uses two main short cuts: | |
73 | ||
74 | - If an argument is flowing in through a non-executable edge, it | |
75 | is ignored. This is useful in cases like this: | |
76 | ||
77 | if (PRED) | |
78 | a_9 = 3; | |
79 | else | |
80 | a_10 = 100; | |
81 | a_11 = PHI (a_9, a_10) | |
82 | ||
83 | If PRED is known to always evaluate to false, then we can | |
84 | assume that a_11 will always take its value from a_10, meaning | |
85 | that instead of considering it VARYING (a_9 and a_10 have |
86 | different values), we can consider it CONSTANT 100. | |
87 | ||
88 | - If an argument has an UNDEFINED value, then it does not affect | |
89 | the outcome of the meet operation. If a variable V_i has an | |
90 | UNDEFINED value, it means that either its defining statement | |
91 | hasn't been visited yet or V_i has no defining statement, in | |
92 | which case the original symbol 'V' is being used | |
93 | uninitialized. Since 'V' is a local variable, the compiler | |
94 | may assume any initial value for it. | |
95 | ||
96 | ||
97 | After propagation, every variable V_i that ends up with a lattice | |
98 | value of CONSTANT will have the associated constant value in the | |
99 | array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for | |
100 | final substitution and folding. | |
101 | ||
6de9cd9a DN |
102 | References: |
103 | ||
104 | Constant propagation with conditional branches, | |
105 | Wegman and Zadeck, ACM TOPLAS 13(2):181-210. | |
106 | ||
107 | Building an Optimizing Compiler, | |
108 | Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9. | |
109 | ||
110 | Advanced Compiler Design and Implementation, | |
111 | Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */ | |
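/* A minimal worked example of the pass (illustration only, not part of
   the original file; the function name is made up).  For the C function
   below, CCP finds a_1 CONSTANT 4, folds the predicate to true, marks
   the else edge not executable, and the PHI for r therefore meets only
   the constant 4, so the final substitution turns "return r" into
   "return 4".  */

static int
ccp_example (void)
{
  int a = 4;       /* a_1 = 4;  lattice value CONSTANT 4.               */
  int r;
  if (a < 10)      /* 4 < 10 folds to true; the else edge is dead.      */
    r = a;         /* r_2 = a_1;  CONSTANT 4.                           */
  else
    r = 100;       /* r_3 = 100;  never simulated, edge not executable. */
  return r;        /* r_4 = PHI (r_2, r_3) meets only r_2;  CONSTANT 4. */
}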
112 | ||
113 | #include "config.h" | |
114 | #include "system.h" | |
115 | #include "coretypes.h" | |
116 | #include "tm.h" | |
6de9cd9a | 117 | #include "tree.h" |
750628d8 | 118 | #include "flags.h" |
6de9cd9a | 119 | #include "tm_p.h" |
6de9cd9a | 120 | #include "basic-block.h" |
750628d8 | 121 | #include "output.h" |
750628d8 | 122 | #include "function.h" |
cf835838 JM |
123 | #include "tree-pretty-print.h" |
124 | #include "gimple-pretty-print.h" | |
750628d8 | 125 | #include "timevar.h" |
6de9cd9a | 126 | #include "tree-dump.h" |
750628d8 | 127 | #include "tree-flow.h" |
6de9cd9a | 128 | #include "tree-pass.h" |
750628d8 | 129 | #include "tree-ssa-propagate.h" |
53a8f709 | 130 | #include "value-prof.h" |
750628d8 | 131 | #include "langhooks.h" |
ae3df618 | 132 | #include "target.h" |
718f9c0f | 133 | #include "diagnostic-core.h" |
6ac01510 | 134 | #include "toplev.h" |
74fe548b | 135 | #include "dbgcnt.h" |
6de9cd9a DN |
136 | |
137 | ||
138 | /* Possible lattice values. */ | |
139 | typedef enum | |
140 | { | |
106dec71 | 141 | UNINITIALIZED, |
6de9cd9a DN |
142 | UNDEFINED, |
143 | CONSTANT, | |
144 | VARYING | |
0bca51f0 | 145 | } ccp_lattice_t; |
6de9cd9a | 146 | |
455e6d5b RG |
147 | struct prop_value_d { |
148 | /* Lattice value. */ | |
149 | ccp_lattice_t lattice_val; | |
150 | ||
151 | /* Propagated value. */ | |
152 | tree value; | |
0b4b14ac RG |
153 | |
154 | /* Mask that applies to the propagated value during CCP. For | |
155 | X with a CONSTANT lattice value X & ~mask == value & ~mask. */ | |
156 | double_int mask; | |
455e6d5b RG |
157 | }; |
158 | ||
159 | typedef struct prop_value_d prop_value_t; | |
160 | ||
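/* Worked example of the mask field above (illustration only): a
   CONSTANT lattice value with value == 0x10 and mask == 0x3 means the
   low two bits are unknown, so the SSA name may be 0x10, 0x11, 0x12 or
   0x13 at run time, but (X & ~0x3) == (0x10 & ~0x3) always holds.  A
   mask of zero means the value is known exactly.  */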
0bca51f0 DN |
161 | /* Array of propagated constant values. After propagation, |
162 | CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If | |
163 | the constant is held in an SSA name representing a memory store | |
38635499 DN |
164 | (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual |
165 | memory reference used to store (i.e., the LHS of the assignment | |
166 | doing the store). */ | |
404f4351 | 167 | static prop_value_t *const_val; |
6de9cd9a | 168 | |
25e20805 | 169 | static void canonicalize_float_value (prop_value_t *); |
f61e18ec | 170 | static bool ccp_fold_stmt (gimple_stmt_iterator *); |
25e20805 | 171 | |
0bca51f0 | 172 | /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */ |
95eec0d6 DB |
173 | |
174 | static void | |
0bca51f0 | 175 | dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val) |
95eec0d6 | 176 | { |
750628d8 | 177 | switch (val.lattice_val) |
95eec0d6 | 178 | { |
0bca51f0 DN |
179 | case UNINITIALIZED: |
180 | fprintf (outf, "%sUNINITIALIZED", prefix); | |
181 | break; | |
750628d8 DN |
182 | case UNDEFINED: |
183 | fprintf (outf, "%sUNDEFINED", prefix); | |
184 | break; | |
185 | case VARYING: | |
186 | fprintf (outf, "%sVARYING", prefix); | |
187 | break; | |
750628d8 DN |
188 | case CONSTANT: |
189 | fprintf (outf, "%sCONSTANT ", prefix); | |
0b4b14ac RG |
190 | if (TREE_CODE (val.value) != INTEGER_CST |
191 | || double_int_zero_p (val.mask)) | |
192 | print_generic_expr (outf, val.value, dump_flags); | |
193 | else | |
194 | { | |
195 | double_int cval = double_int_and_not (tree_to_double_int (val.value), | |
196 | val.mask); | |
197 | fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX, | |
198 | prefix, cval.high, cval.low); | |
199 | fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")", | |
200 | val.mask.high, val.mask.low); | |
201 | } | |
750628d8 DN |
202 | break; |
203 | default: | |
1e128c5f | 204 | gcc_unreachable (); |
750628d8 | 205 | } |
95eec0d6 | 206 | } |
6de9cd9a | 207 | |
6de9cd9a | 208 | |
0bca51f0 DN |
209 | /* Print lattice value VAL to stderr. */ |
210 | ||
211 | void debug_lattice_value (prop_value_t val); | |
212 | ||
24e47c76 | 213 | DEBUG_FUNCTION void |
0bca51f0 DN |
214 | debug_lattice_value (prop_value_t val) |
215 | { | |
216 | dump_lattice_value (stderr, "", val); | |
217 | fprintf (stderr, "\n"); | |
218 | } | |
6de9cd9a | 219 | |
6de9cd9a | 220 | |
0bca51f0 DN |
221 | /* Compute a default value for variable VAR and store it in the |
222 | CONST_VAL array. The following rules are used to get default | |
223 | values: | |
95eec0d6 | 224 | |
0bca51f0 DN |
225 | 1- Global and static variables that are declared constant are |
226 | considered CONSTANT. | |
227 | ||
228 | 2- Any other value is considered UNDEFINED. This is useful when | |
750628d8 DN |
229 | considering PHI nodes. PHI arguments that are undefined do not |
230 | change the constant value of the PHI node, which allows for more | |
0bca51f0 | 231 | constants to be propagated. |
6de9cd9a | 232 | |
caf55296 | 233 | 3- Variables defined by statements other than assignments and PHI |
0bca51f0 | 234 | nodes are considered VARYING. |
6de9cd9a | 235 | |
caf55296 | 236 | 4- Initial values of variables that are not GIMPLE registers are |
106dec71 | 237 | considered VARYING. */ |
6de9cd9a | 238 | |
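/* Illustration of the rules above (not part of the pass; the names are
   made up):

     static const int g = 42;      an SSA name defined by a load of 'g'
                                   starts out CONSTANT 42 (rule 1)
     int local; ... = local;       'local' read before any assignment
                                   starts out UNDEFINED (rule 2)
     __asm__ ("..." : "=r" (x));   a value defined by a statement that is
                                   neither an assignment nor a PHI starts
                                   out VARYING (rule 3)  */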
0bca51f0 DN |
239 | static prop_value_t |
240 | get_default_value (tree var) | |
241 | { | |
242 | tree sym = SSA_NAME_VAR (var); | |
0b4b14ac | 243 | prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } }; |
e8114fba RG |
244 | gimple stmt; |
245 | ||
246 | stmt = SSA_NAME_DEF_STMT (var); | |
247 | ||
248 | if (gimple_nop_p (stmt)) | |
6de9cd9a | 249 | { |
e8114fba RG |
250 | /* Variables defined by an empty statement are those used |
251 | before being initialized. If VAR is a local variable, we | |
252 | can assume initially that it is UNDEFINED, otherwise we must | |
253 | consider it VARYING. */ | |
6938f93f JH |
254 | if (is_gimple_reg (sym) |
255 | && TREE_CODE (sym) == VAR_DECL) | |
e8114fba RG |
256 | val.lattice_val = UNDEFINED; |
257 | else | |
0b4b14ac RG |
258 | { |
259 | val.lattice_val = VARYING; | |
260 | val.mask = double_int_minus_one; | |
261 | } | |
6de9cd9a | 262 | } |
e8114fba RG |
263 | else if (is_gimple_assign (stmt) |
264 | /* Value-returning GIMPLE_CALL statements assign to | |
265 | a variable, and are treated similarly to GIMPLE_ASSIGN. */ | |
266 | || (is_gimple_call (stmt) | |
267 | && gimple_call_lhs (stmt) != NULL_TREE) | |
268 | || gimple_code (stmt) == GIMPLE_PHI) | |
750628d8 | 269 | { |
e8114fba RG |
270 | tree cst; |
271 | if (gimple_assign_single_p (stmt) | |
272 | && DECL_P (gimple_assign_rhs1 (stmt)) | |
273 | && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt)))) | |
0bca51f0 | 274 | { |
e8114fba RG |
275 | val.lattice_val = CONSTANT; |
276 | val.value = cst; | |
0bca51f0 DN |
277 | } |
278 | else | |
e8114fba RG |
279 | /* Any other variable defined by an assignment or a PHI node |
280 | is considered UNDEFINED. */ | |
281 | val.lattice_val = UNDEFINED; | |
282 | } | |
283 | else | |
284 | { | |
285 | /* Otherwise, VAR will never take on a constant value. */ | |
286 | val.lattice_val = VARYING; | |
0b4b14ac | 287 | val.mask = double_int_minus_one; |
750628d8 | 288 | } |
6de9cd9a | 289 | |
750628d8 DN |
290 | return val; |
291 | } | |
6de9cd9a | 292 | |
6de9cd9a | 293 | |
106dec71 | 294 | /* Get the constant value associated with variable VAR. */ |
6de9cd9a | 295 | |
106dec71 ZD |
296 | static inline prop_value_t * |
297 | get_value (tree var) | |
0bca51f0 | 298 | { |
ed97ddc6 | 299 | prop_value_t *val; |
106dec71 | 300 | |
ed97ddc6 RG |
301 | if (const_val == NULL) |
302 | return NULL; | |
303 | ||
304 | val = &const_val[SSA_NAME_VERSION (var)]; | |
106dec71 | 305 | if (val->lattice_val == UNINITIALIZED) |
6de9cd9a DN |
306 | *val = get_default_value (var); |
307 | ||
25e20805 RG |
308 | canonicalize_float_value (val); |
309 | ||
6de9cd9a DN |
310 | return val; |
311 | } | |
312 | ||
84d77ca6 RG |
313 | /* Return the constant tree value associated with VAR. */ |
314 | ||
315 | static inline tree | |
316 | get_constant_value (tree var) | |
317 | { | |
318 | prop_value_t *val = get_value (var); | |
0b4b14ac RG |
319 | if (val |
320 | && val->lattice_val == CONSTANT | |
321 | && (TREE_CODE (val->value) != INTEGER_CST | |
322 | || double_int_zero_p (val->mask))) | |
84d77ca6 RG |
323 | return val->value; |
324 | return NULL_TREE; | |
325 | } | |
326 | ||
106dec71 ZD |
327 | /* Sets the value associated with VAR to VARYING. */ |
328 | ||
329 | static inline void | |
330 | set_value_varying (tree var) | |
331 | { | |
332 | prop_value_t *val = &const_val[SSA_NAME_VERSION (var)]; | |
333 | ||
334 | val->lattice_val = VARYING; | |
335 | val->value = NULL_TREE; | |
0b4b14ac | 336 | val->mask = double_int_minus_one; |
106dec71 | 337 | } |
6de9cd9a | 338 | |
fbb5445b L |
339 | /* For float types, modify the value of VAL to make ccp work correctly |
340 | for non-standard values (-0, NaN): | |
341 | ||
342 | If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0. | |
343 | If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED. | |
344 | This is to fix the following problem (see PR 29921): Suppose we have | |
345 | ||
346 | x = 0.0 * y | |
347 | ||
348 | and we set value of y to NaN. This causes value of x to be set to NaN. | |
349 | When we later determine that y is in fact VARYING, fold uses the fact | |
350 | that HONOR_NANS is false, and we try to change the value of x to 0, | |
351 | causing an ICE. With HONOR_NANS being false, the real appearance of | |
352 | NaN would cause undefined behavior, though, so claiming that y (and x) | |
353 | are UNDEFINED initially is correct. */ | |
354 | ||
355 | static void | |
356 | canonicalize_float_value (prop_value_t *val) | |
357 | { | |
358 | enum machine_mode mode; | |
359 | tree type; | |
360 | REAL_VALUE_TYPE d; | |
361 | ||
362 | if (val->lattice_val != CONSTANT | |
363 | || TREE_CODE (val->value) != REAL_CST) | |
364 | return; | |
365 | ||
366 | d = TREE_REAL_CST (val->value); | |
367 | type = TREE_TYPE (val->value); | |
368 | mode = TYPE_MODE (type); | |
369 | ||
370 | if (!HONOR_SIGNED_ZEROS (mode) | |
371 | && REAL_VALUE_MINUS_ZERO (d)) | |
372 | { | |
373 | val->value = build_real (type, dconst0); | |
374 | return; | |
375 | } | |
376 | ||
377 | if (!HONOR_NANS (mode) | |
378 | && REAL_VALUE_ISNAN (d)) | |
379 | { | |
380 | val->lattice_val = UNDEFINED; | |
381 | val->value = NULL; | |
fbb5445b L |
382 | return; |
383 | } | |
384 | } | |
385 | ||
0b4b14ac RG |
386 | /* Return whether the lattice transition is valid. */ |
387 | ||
388 | static bool | |
389 | valid_lattice_transition (prop_value_t old_val, prop_value_t new_val) | |
390 | { | |
391 | /* Lattice transitions must always be monotonically increasing in | |
392 | value. */ | |
393 | if (old_val.lattice_val < new_val.lattice_val) | |
394 | return true; | |
395 | ||
396 | if (old_val.lattice_val != new_val.lattice_val) | |
397 | return false; | |
398 | ||
399 | if (!old_val.value && !new_val.value) | |
400 | return true; | |
401 | ||
402 | /* Now both lattice values are CONSTANT. */ | |
403 | ||
404 | /* Allow transitioning from &x to &x & ~3. */ | |
405 | if (TREE_CODE (old_val.value) != INTEGER_CST | |
406 | && TREE_CODE (new_val.value) == INTEGER_CST) | |
407 | return true; | |
408 | ||
409 | /* Bit-lattices have to agree in the still valid bits. */ | |
410 | if (TREE_CODE (old_val.value) == INTEGER_CST | |
411 | && TREE_CODE (new_val.value) == INTEGER_CST) | |
412 | return double_int_equal_p | |
413 | (double_int_and_not (tree_to_double_int (old_val.value), | |
414 | new_val.mask), | |
415 | double_int_and_not (tree_to_double_int (new_val.value), | |
416 | new_val.mask)); | |
417 | ||
418 | /* Otherwise constant values have to agree. */ | |
419 | return operand_equal_p (old_val.value, new_val.value, 0); | |
420 | } | |
421 | ||
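/* Worked example (illustration only): UNDEFINED -> CONSTANT -> VARYING
   are valid transitions because the lattice value strictly increases.
   For two CONSTANT integers the bits the new mask still claims to know
   must agree: going from value 4, mask 0 (exactly 4) to value 4, mask 2
   (bit 1 no longer known) is valid, whereas going to value 7, mask 0
   would not be, since a known bit would have changed.  */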
0bca51f0 DN |
422 | /* Set the value for variable VAR to NEW_VAL. Return true if the new |
423 | value is different from VAR's previous value. */ | |
6de9cd9a | 424 | |
750628d8 | 425 | static bool |
0bca51f0 | 426 | set_lattice_value (tree var, prop_value_t new_val) |
6de9cd9a | 427 | { |
7a95d078 RG |
428 | /* We can deal with old UNINITIALIZED values just fine here. */ |
429 | prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)]; | |
0bca51f0 | 430 | |
fbb5445b L |
431 | canonicalize_float_value (&new_val); |
432 | ||
0b4b14ac RG |
433 | /* We have to be careful to not go up the bitwise lattice |
434 | represented by the mask. | |
435 | ??? This doesn't seem to be the best place to enforce this. */ | |
436 | if (new_val.lattice_val == CONSTANT | |
437 | && old_val->lattice_val == CONSTANT | |
438 | && TREE_CODE (new_val.value) == INTEGER_CST | |
439 | && TREE_CODE (old_val->value) == INTEGER_CST) | |
440 | { | |
441 | double_int diff; | |
442 | diff = double_int_xor (tree_to_double_int (new_val.value), | |
443 | tree_to_double_int (old_val->value)); | |
444 | new_val.mask = double_int_ior (new_val.mask, | |
445 | double_int_ior (old_val->mask, diff)); | |
446 | } | |
106dec71 | 447 | |
0b4b14ac | 448 | gcc_assert (valid_lattice_transition (*old_val, new_val)); |
0bca51f0 | 449 | |
0b4b14ac RG |
450 | /* If *OLD_VAL and NEW_VAL are the same, return false to inform the |
451 | caller that this was a non-transition. */ | |
452 | if (old_val->lattice_val != new_val.lattice_val | |
453 | || (new_val.lattice_val == CONSTANT | |
454 | && TREE_CODE (new_val.value) == INTEGER_CST | |
455 | && (TREE_CODE (old_val->value) != INTEGER_CST | |
456 | || !double_int_equal_p (new_val.mask, old_val->mask)))) | |
6de9cd9a | 457 | { |
0b4b14ac RG |
458 | /* ??? We would like to delay creation of INTEGER_CSTs from |
459 | partially constants here. */ | |
460 | ||
750628d8 DN |
461 | if (dump_file && (dump_flags & TDF_DETAILS)) |
462 | { | |
0bca51f0 | 463 | dump_lattice_value (dump_file, "Lattice value changed to ", new_val); |
106dec71 | 464 | fprintf (dump_file, ". Adding SSA edges to worklist.\n"); |
750628d8 DN |
465 | } |
466 | ||
0bca51f0 DN |
467 | *old_val = new_val; |
468 | ||
7a95d078 | 469 | gcc_assert (new_val.lattice_val != UNINITIALIZED); |
106dec71 | 470 | return true; |
6de9cd9a | 471 | } |
750628d8 DN |
472 | |
473 | return false; | |
6de9cd9a DN |
474 | } |
475 | ||
0b4b14ac RG |
476 | static prop_value_t get_value_for_expr (tree, bool); |
477 | static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree); | |
478 | static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *, | |
479 | tree, double_int, double_int, | |
480 | tree, double_int, double_int); | |
481 | ||
482 | /* Return a double_int that can be used for bitwise simplifications | |
483 | from VAL. */ | |
484 | ||
485 | static double_int | |
486 | value_to_double_int (prop_value_t val) | |
487 | { | |
488 | if (val.value | |
489 | && TREE_CODE (val.value) == INTEGER_CST) | |
490 | return tree_to_double_int (val.value); | |
491 | else | |
492 | return double_int_zero; | |
493 | } | |
494 | ||
495 | /* Return the value for the address expression EXPR based on alignment | |
496 | information. */ | |
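/* Worked example (illustration only; the declaration is made up): for

     static int buf[4] __attribute__ ((aligned (16)));

   the address &buf is not a compile-time constant, but its low four
   bits are known to be zero, so this function records CONSTANT 0 with a
   mask that leaves only the bits above the 16-byte alignment unknown.  */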
7a95d078 RG |
497 | |
498 | static prop_value_t | |
0b4b14ac RG |
499 | get_value_from_alignment (tree expr) |
500 | { | |
501 | prop_value_t val; | |
502 | HOST_WIDE_INT bitsize, bitpos; | |
503 | tree base, offset; | |
504 | enum machine_mode mode; | |
505 | int align; | |
506 | ||
507 | gcc_assert (TREE_CODE (expr) == ADDR_EXPR); | |
508 | ||
509 | base = get_inner_reference (TREE_OPERAND (expr, 0), | |
510 | &bitsize, &bitpos, &offset, | |
511 | &mode, &align, &align, false); | |
512 | if (TREE_CODE (base) == MISALIGNED_INDIRECT_REF) | |
513 | val = get_value_for_expr (TREE_OPERAND (base, 0), true); | |
514 | else if (TREE_CODE (base) == MEM_REF) | |
515 | val = bit_value_binop (PLUS_EXPR, TREE_TYPE (expr), | |
516 | TREE_OPERAND (base, 0), TREE_OPERAND (base, 1)); | |
517 | else if (base | |
e80c2726 | 518 | && ((align = get_object_alignment (base, BIGGEST_ALIGNMENT)) |
0b4b14ac RG |
519 | > BITS_PER_UNIT)) |
520 | { | |
521 | val.lattice_val = CONSTANT; | |
522 | /* We assume pointers are zero-extended. */ | |
523 | val.mask = double_int_and_not | |
524 | (double_int_mask (TYPE_PRECISION (TREE_TYPE (expr))), | |
525 | uhwi_to_double_int (align / BITS_PER_UNIT - 1)); | |
526 | val.value = build_int_cst (TREE_TYPE (expr), 0); | |
527 | } | |
528 | else | |
529 | { | |
530 | val.lattice_val = VARYING; | |
531 | val.mask = double_int_minus_one; | |
532 | val.value = NULL_TREE; | |
533 | } | |
534 | if (bitpos != 0) | |
535 | { | |
536 | double_int value, mask; | |
537 | bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask, | |
538 | TREE_TYPE (expr), value_to_double_int (val), val.mask, | |
539 | TREE_TYPE (expr), | |
540 | shwi_to_double_int (bitpos / BITS_PER_UNIT), | |
541 | double_int_zero); | |
542 | val.lattice_val = double_int_minus_one_p (mask) ? VARYING : CONSTANT; | |
543 | val.mask = mask; | |
544 | if (val.lattice_val == CONSTANT) | |
545 | val.value = double_int_to_tree (TREE_TYPE (expr), value); | |
546 | else | |
547 | val.value = NULL_TREE; | |
548 | } | |
549 | /* ??? We should handle i * 4 and more complex expressions from | |
550 | the offset, possibly by just expanding get_value_for_expr. */ | |
551 | if (offset != NULL_TREE) | |
552 | { | |
553 | double_int value, mask; | |
554 | prop_value_t oval = get_value_for_expr (offset, true); | |
555 | bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask, | |
556 | TREE_TYPE (expr), value_to_double_int (val), val.mask, | |
557 | TREE_TYPE (expr), value_to_double_int (oval), | |
558 | oval.mask); | |
559 | val.mask = mask; | |
560 | if (double_int_minus_one_p (mask)) | |
561 | { | |
562 | val.lattice_val = VARYING; | |
563 | val.value = NULL_TREE; | |
564 | } | |
565 | else | |
566 | { | |
567 | val.lattice_val = CONSTANT; | |
568 | val.value = double_int_to_tree (TREE_TYPE (expr), value); | |
569 | } | |
570 | } | |
571 | ||
572 | return val; | |
573 | } | |
574 | ||
575 | /* Return the value for the tree operand EXPR. If FOR_BITS_P is true | |
576 | return constant bits extracted from alignment information for | |
577 | invariant addresses. */ | |
578 | ||
579 | static prop_value_t | |
580 | get_value_for_expr (tree expr, bool for_bits_p) | |
7a95d078 RG |
581 | { |
582 | prop_value_t val; | |
583 | ||
584 | if (TREE_CODE (expr) == SSA_NAME) | |
0b4b14ac RG |
585 | { |
586 | val = *get_value (expr); | |
587 | if (for_bits_p | |
588 | && val.lattice_val == CONSTANT | |
589 | && TREE_CODE (val.value) == ADDR_EXPR) | |
590 | val = get_value_from_alignment (val.value); | |
591 | } | |
592 | else if (is_gimple_min_invariant (expr) | |
593 | && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR)) | |
7a95d078 RG |
594 | { |
595 | val.lattice_val = CONSTANT; | |
596 | val.value = expr; | |
0b4b14ac | 597 | val.mask = double_int_zero; |
7a95d078 RG |
598 | canonicalize_float_value (&val); |
599 | } | |
0b4b14ac RG |
600 | else if (TREE_CODE (expr) == ADDR_EXPR) |
601 | val = get_value_from_alignment (expr); | |
7a95d078 RG |
602 | else |
603 | { | |
604 | val.lattice_val = VARYING; | |
0b4b14ac | 605 | val.mask = double_int_minus_one; |
7a95d078 RG |
606 | val.value = NULL_TREE; |
607 | } | |
7a95d078 RG |
608 | return val; |
609 | } | |
610 | ||
0bca51f0 | 611 | /* Return the likely CCP lattice value for STMT. |
6de9cd9a | 612 | |
750628d8 | 613 | If STMT has no operands, then return CONSTANT. |
6de9cd9a | 614 | |
7f879c96 RG |
615 | Else if undefinedness of operands of STMT causes its value to be |
616 | undefined, then return UNDEFINED. | |
6de9cd9a | 617 | |
750628d8 | 618 | Else if any operands of STMT are constants, then return CONSTANT. |
6de9cd9a | 619 | |
750628d8 | 620 | Else return VARYING. */ |
6de9cd9a | 621 | |
0bca51f0 | 622 | static ccp_lattice_t |
726a989a | 623 | likely_value (gimple stmt) |
750628d8 | 624 | { |
7f879c96 | 625 | bool has_constant_operand, has_undefined_operand, all_undefined_operands; |
750628d8 DN |
626 | tree use; |
627 | ssa_op_iter iter; | |
e8114fba | 628 | unsigned i; |
6de9cd9a | 629 | |
e0c68ce9 | 630 | enum gimple_code code = gimple_code (stmt); |
726a989a RB |
631 | |
632 | /* This function appears to be called only for assignments, calls, | |
633 | conditionals, and switches, due to the logic in visit_stmt. */ | |
634 | gcc_assert (code == GIMPLE_ASSIGN | |
635 | || code == GIMPLE_CALL | |
636 | || code == GIMPLE_COND | |
637 | || code == GIMPLE_SWITCH); | |
0bca51f0 DN |
638 | |
639 | /* If the statement has volatile operands, it won't fold to a | |
640 | constant value. */ | |
726a989a | 641 | if (gimple_has_volatile_ops (stmt)) |
0bca51f0 DN |
642 | return VARYING; |
643 | ||
726a989a | 644 | /* Arrive here for more complex cases. */ |
106dec71 | 645 | has_constant_operand = false; |
7f879c96 RG |
646 | has_undefined_operand = false; |
647 | all_undefined_operands = true; | |
e8114fba | 648 | FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE) |
750628d8 | 649 | { |
106dec71 | 650 | prop_value_t *val = get_value (use); |
750628d8 | 651 | |
106dec71 | 652 | if (val->lattice_val == UNDEFINED) |
7f879c96 RG |
653 | has_undefined_operand = true; |
654 | else | |
655 | all_undefined_operands = false; | |
0bca51f0 | 656 | |
750628d8 | 657 | if (val->lattice_val == CONSTANT) |
106dec71 | 658 | has_constant_operand = true; |
6de9cd9a | 659 | } |
750628d8 | 660 | |
5006671f RG |
661 | /* There may be constants in regular rhs operands. For calls we |
662 | have to ignore lhs, fndecl and static chain, otherwise only | |
663 | the lhs. */ | |
664 | for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt); | |
e8114fba RG |
665 | i < gimple_num_ops (stmt); ++i) |
666 | { | |
667 | tree op = gimple_op (stmt, i); | |
668 | if (!op || TREE_CODE (op) == SSA_NAME) | |
669 | continue; | |
670 | if (is_gimple_min_invariant (op)) | |
671 | has_constant_operand = true; | |
672 | } | |
673 | ||
1cdaa211 RG |
674 | if (has_constant_operand) |
675 | all_undefined_operands = false; | |
676 | ||
7f879c96 RG |
677 | /* If the operation combines operands like COMPLEX_EXPR make sure to |
678 | not mark the result UNDEFINED if only one part of the result is | |
679 | undefined. */ | |
726a989a | 680 | if (has_undefined_operand && all_undefined_operands) |
7f879c96 | 681 | return UNDEFINED; |
726a989a | 682 | else if (code == GIMPLE_ASSIGN && has_undefined_operand) |
7f879c96 | 683 | { |
726a989a | 684 | switch (gimple_assign_rhs_code (stmt)) |
7f879c96 RG |
685 | { |
686 | /* Unary operators are handled with all_undefined_operands. */ | |
687 | case PLUS_EXPR: | |
688 | case MINUS_EXPR: | |
7f879c96 | 689 | case POINTER_PLUS_EXPR: |
7f879c96 RG |
690 | /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected. |
691 | Not bitwise operators, one VARYING operand may specify the | |
692 | result completely. Not logical operators for the same reason. | |
0cedb9e9 RG |
693 | Not COMPLEX_EXPR as one VARYING operand makes the result partly |
694 | not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because | |
695 | the undefined operand may be promoted. */ | |
7f879c96 RG |
696 | return UNDEFINED; |
697 | ||
698 | default: | |
699 | ; | |
700 | } | |
701 | } | |
702 | /* If there was an UNDEFINED operand but the result may be not UNDEFINED | |
703 | fall back to VARYING even if there were CONSTANT operands. */ | |
704 | if (has_undefined_operand) | |
705 | return VARYING; | |
706 | ||
e8114fba RG |
707 | /* We do not consider virtual operands here -- load from read-only |
708 | memory may have only VARYING virtual operands, but still be | |
709 | constant. */ | |
106dec71 | 710 | if (has_constant_operand |
e8114fba | 711 | || gimple_references_memory_p (stmt)) |
0bca51f0 DN |
712 | return CONSTANT; |
713 | ||
106dec71 | 714 | return VARYING; |
6de9cd9a DN |
715 | } |
716 | ||
106dec71 ZD |
717 | /* Returns true if STMT cannot be constant. */ |
718 | ||
719 | static bool | |
726a989a | 720 | surely_varying_stmt_p (gimple stmt) |
106dec71 ZD |
721 | { |
722 | /* If the statement has operands that we cannot handle, it cannot be | |
723 | constant. */ | |
726a989a | 724 | if (gimple_has_volatile_ops (stmt)) |
106dec71 ZD |
725 | return true; |
726 | ||
174ef36d RG |
727 | /* If it is a call and does not return a value or is not a |
728 | builtin and not an indirect call, it is varying. */ | |
726a989a | 729 | if (is_gimple_call (stmt)) |
174ef36d RG |
730 | { |
731 | tree fndecl; | |
732 | if (!gimple_call_lhs (stmt) | |
733 | || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE | |
99f536cc | 734 | && !DECL_BUILT_IN (fndecl))) |
174ef36d RG |
735 | return true; |
736 | } | |
106dec71 | 737 | |
e8114fba | 738 | /* Any other store operation is not interesting. */ |
5006671f | 739 | else if (gimple_vdef (stmt)) |
e8114fba RG |
740 | return true; |
741 | ||
106dec71 ZD |
742 | /* Anything other than assignments and conditional jumps are not |
743 | interesting for CCP. */ | |
726a989a | 744 | if (gimple_code (stmt) != GIMPLE_ASSIGN |
174ef36d RG |
745 | && gimple_code (stmt) != GIMPLE_COND |
746 | && gimple_code (stmt) != GIMPLE_SWITCH | |
747 | && gimple_code (stmt) != GIMPLE_CALL) | |
106dec71 ZD |
748 | return true; |
749 | ||
750 | return false; | |
751 | } | |
6de9cd9a | 752 | |
750628d8 | 753 | /* Initialize local data structures for CCP. */ |
6de9cd9a DN |
754 | |
755 | static void | |
750628d8 | 756 | ccp_initialize (void) |
6de9cd9a | 757 | { |
750628d8 | 758 | basic_block bb; |
6de9cd9a | 759 | |
b9eae1a9 | 760 | const_val = XCNEWVEC (prop_value_t, num_ssa_names); |
6de9cd9a | 761 | |
750628d8 DN |
762 | /* Initialize simulation flags for PHI nodes and statements. */ |
763 | FOR_EACH_BB (bb) | |
6de9cd9a | 764 | { |
726a989a | 765 | gimple_stmt_iterator i; |
6de9cd9a | 766 | |
726a989a | 767 | for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i)) |
750628d8 | 768 | { |
726a989a | 769 | gimple stmt = gsi_stmt (i); |
cd6ea7a2 RH |
770 | bool is_varying; |
771 | ||
772 | /* If the statement is a control insn, then we do not | |
773 | want to avoid simulating the statement once. Failure | |
774 | to do so means that those edges will never get added. */ | |
775 | if (stmt_ends_bb_p (stmt)) | |
776 | is_varying = false; | |
777 | else | |
778 | is_varying = surely_varying_stmt_p (stmt); | |
6de9cd9a | 779 | |
106dec71 | 780 | if (is_varying) |
750628d8 | 781 | { |
0bca51f0 DN |
782 | tree def; |
783 | ssa_op_iter iter; | |
784 | ||
785 | /* If the statement will not produce a constant, mark | |
786 | all its outputs VARYING. */ | |
787 | FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS) | |
e8114fba | 788 | set_value_varying (def); |
750628d8 | 789 | } |
726a989a | 790 | prop_set_simulate_again (stmt, !is_varying); |
750628d8 | 791 | } |
6de9cd9a DN |
792 | } |
793 | ||
726a989a RB |
794 | /* Now process PHI nodes. We never clear the simulate_again flag on |
795 | phi nodes, since we do not know which edges are executable yet, | |
796 | except for phi nodes for virtual operands when we do not do store ccp. */ | |
750628d8 | 797 | FOR_EACH_BB (bb) |
6de9cd9a | 798 | { |
726a989a | 799 | gimple_stmt_iterator i; |
750628d8 | 800 | |
726a989a RB |
801 | for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i)) |
802 | { | |
803 | gimple phi = gsi_stmt (i); | |
804 | ||
dce2b2f6 | 805 | if (!is_gimple_reg (gimple_phi_result (phi))) |
726a989a | 806 | prop_set_simulate_again (phi, false); |
106dec71 | 807 | else |
726a989a | 808 | prop_set_simulate_again (phi, true); |
750628d8 | 809 | } |
6de9cd9a | 810 | } |
750628d8 | 811 | } |
6de9cd9a | 812 | |
74fe548b XDL |
813 | /* Debug count support. Reset the values of ssa names to
814 | VARYING when the total number of ssa names analyzed is |
815 | beyond the specified debug count. */ |
816 | ||
817 | static void | |
818 | do_dbg_cnt (void) | |
819 | { | |
820 | unsigned i; | |
821 | for (i = 0; i < num_ssa_names; i++) | |
822 | { | |
823 | if (!dbg_cnt (ccp)) | |
824 | { | |
825 | const_val[i].lattice_val = VARYING; | |
0b4b14ac | 826 | const_val[i].mask = double_int_minus_one; |
74fe548b XDL |
827 | const_val[i].value = NULL_TREE; |
828 | } | |
829 | } | |
830 | } | |
831 | ||
6de9cd9a | 832 | |
0bca51f0 | 833 | /* Do final substitution of propagated values, cleanup the flowgraph and |
b8698a0f | 834 | free allocated storage. |
6de9cd9a | 835 | |
3253eafb JH |
836 | Return TRUE when something was optimized. */ |
837 | ||
838 | static bool | |
0bca51f0 | 839 | ccp_finalize (void) |
6de9cd9a | 840 | { |
74fe548b XDL |
841 | bool something_changed; |
842 | ||
843 | do_dbg_cnt (); | |
0bca51f0 | 844 | /* Perform substitutions based on the known constant values. */ |
455e6d5b RG |
845 | something_changed = substitute_and_fold (get_constant_value, |
846 | ccp_fold_stmt, true); | |
6de9cd9a | 847 | |
0bca51f0 | 848 | free (const_val); |
ed97ddc6 | 849 | const_val = NULL; |
3253eafb | 850 | return something_changed; |
6de9cd9a DN |
851 | } |
852 | ||
853 | ||
0bca51f0 DN |
854 | /* Compute the meet operator between *VAL1 and *VAL2. Store the result |
855 | in VAL1. | |
856 | ||
857 | any M UNDEFINED = any | |
0bca51f0 DN |
858 | any M VARYING = VARYING |
859 | Ci M Cj = Ci if (i == j) | |
860 | Ci M Cj = VARYING if (i != j) | |
106dec71 | 861 | */
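/* Worked example (illustration only): meeting CONSTANT 5 (binary 0101)
   with CONSTANT 7 (binary 0111) ORs the differing bit into the mask,
   giving CONSTANT 5 with mask 0010, i.e. every bit known except bit 1.
   Only when the combined mask becomes all ones does the result drop to
   VARYING.  */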
6de9cd9a DN |
862 | |
863 | static void | |
0bca51f0 | 864 | ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2) |
6de9cd9a | 865 | { |
0bca51f0 | 866 | if (val1->lattice_val == UNDEFINED) |
6de9cd9a | 867 | { |
0bca51f0 DN |
868 | /* UNDEFINED M any = any */ |
869 | *val1 = *val2; | |
750628d8 | 870 | } |
0bca51f0 | 871 | else if (val2->lattice_val == UNDEFINED) |
195da47b | 872 | { |
0bca51f0 DN |
873 | /* any M UNDEFINED = any |
874 | Nothing to do. VAL1 already contains the value we want. */ | |
875 | ; | |
195da47b | 876 | } |
0bca51f0 DN |
877 | else if (val1->lattice_val == VARYING |
878 | || val2->lattice_val == VARYING) | |
750628d8 | 879 | { |
0bca51f0 DN |
880 | /* any M VARYING = VARYING. */ |
881 | val1->lattice_val = VARYING; | |
0b4b14ac | 882 | val1->mask = double_int_minus_one; |
0bca51f0 | 883 | val1->value = NULL_TREE; |
750628d8 | 884 | } |
0b4b14ac RG |
885 | else if (val1->lattice_val == CONSTANT |
886 | && val2->lattice_val == CONSTANT | |
887 | && TREE_CODE (val1->value) == INTEGER_CST | |
888 | && TREE_CODE (val2->value) == INTEGER_CST) | |
889 | { | |
890 | /* Ci M Cj = Ci if (i == j) | |
891 | Ci M Cj = VARYING if (i != j) | |
892 | ||
893 | For INTEGER_CSTs mask unequal bits. If no equal bits remain, | |
894 | drop to varying. */ | |
895 | val1->mask | |
896 | = double_int_ior (double_int_ior (val1->mask, | |
897 | val2->mask), | |
898 | double_int_xor (tree_to_double_int (val1->value), | |
899 | tree_to_double_int (val2->value))); | |
900 | if (double_int_minus_one_p (val1->mask)) | |
901 | { | |
902 | val1->lattice_val = VARYING; | |
903 | val1->value = NULL_TREE; | |
904 | } | |
905 | } | |
0bca51f0 DN |
906 | else if (val1->lattice_val == CONSTANT |
907 | && val2->lattice_val == CONSTANT | |
dce2b2f6 | 908 | && simple_cst_equal (val1->value, val2->value) == 1) |
750628d8 | 909 | { |
0bca51f0 DN |
910 | /* Ci M Cj = Ci if (i == j) |
911 | Ci M Cj = VARYING if (i != j) | |
912 | ||
0b4b14ac RG |
913 | VAL1 already contains the value we want for equivalent values. */ |
914 | } | |
915 | else if (val1->lattice_val == CONSTANT | |
916 | && val2->lattice_val == CONSTANT | |
917 | && (TREE_CODE (val1->value) == ADDR_EXPR | |
918 | || TREE_CODE (val2->value) == ADDR_EXPR)) | |
919 | { | |
920 | /* When not equal addresses are involved try meeting for | |
921 | alignment. */ | |
922 | prop_value_t tem = *val2; | |
923 | if (TREE_CODE (val1->value) == ADDR_EXPR) | |
924 | *val1 = get_value_for_expr (val1->value, true); | |
925 | if (TREE_CODE (val2->value) == ADDR_EXPR) | |
926 | tem = get_value_for_expr (val2->value, true); | |
927 | ccp_lattice_meet (val1, &tem); | |
750628d8 DN |
928 | } |
929 | else | |
930 | { | |
0bca51f0 DN |
931 | /* Any other combination is VARYING. */ |
932 | val1->lattice_val = VARYING; | |
0b4b14ac | 933 | val1->mask = double_int_minus_one; |
0bca51f0 | 934 | val1->value = NULL_TREE; |
750628d8 | 935 | } |
6de9cd9a DN |
936 | } |
937 | ||
938 | ||
750628d8 DN |
939 | /* Loop through the PHI_NODE's parameters for BLOCK and compare their |
940 | lattice values to determine PHI_NODE's lattice value. The value of a | |
0bca51f0 | 941 | PHI node is determined calling ccp_lattice_meet with all the arguments |
750628d8 | 942 | of the PHI node that are incoming via executable edges. */ |
6de9cd9a | 943 | |
750628d8 | 944 | static enum ssa_prop_result |
726a989a | 945 | ccp_visit_phi_node (gimple phi) |
6de9cd9a | 946 | { |
726a989a | 947 | unsigned i; |
0bca51f0 | 948 | prop_value_t *old_val, new_val; |
6de9cd9a | 949 | |
750628d8 | 950 | if (dump_file && (dump_flags & TDF_DETAILS)) |
6de9cd9a | 951 | { |
750628d8 | 952 | fprintf (dump_file, "\nVisiting PHI node: "); |
726a989a | 953 | print_gimple_stmt (dump_file, phi, 0, dump_flags); |
6de9cd9a | 954 | } |
6de9cd9a | 955 | |
726a989a | 956 | old_val = get_value (gimple_phi_result (phi)); |
750628d8 DN |
957 | switch (old_val->lattice_val) |
958 | { | |
959 | case VARYING: | |
0bca51f0 | 960 | return SSA_PROP_VARYING; |
6de9cd9a | 961 | |
750628d8 DN |
962 | case CONSTANT: |
963 | new_val = *old_val; | |
964 | break; | |
6de9cd9a | 965 | |
750628d8 | 966 | case UNDEFINED: |
750628d8 | 967 | new_val.lattice_val = UNDEFINED; |
0bca51f0 | 968 | new_val.value = NULL_TREE; |
750628d8 | 969 | break; |
6de9cd9a | 970 | |
750628d8 | 971 | default: |
1e128c5f | 972 | gcc_unreachable (); |
750628d8 | 973 | } |
6de9cd9a | 974 | |
726a989a | 975 | for (i = 0; i < gimple_phi_num_args (phi); i++) |
750628d8 | 976 | { |
0bca51f0 DN |
977 | /* Compute the meet operator over all the PHI arguments flowing |
978 | through executable edges. */ | |
726a989a | 979 | edge e = gimple_phi_arg_edge (phi, i); |
6de9cd9a | 980 | |
750628d8 DN |
981 | if (dump_file && (dump_flags & TDF_DETAILS)) |
982 | { | |
983 | fprintf (dump_file, | |
984 | "\n Argument #%d (%d -> %d %sexecutable)\n", | |
985 | i, e->src->index, e->dest->index, | |
986 | (e->flags & EDGE_EXECUTABLE) ? "" : "not "); | |
987 | } | |
988 | ||
989 | /* If the incoming edge is executable, compute the meet operator for |
990 | the existing value of the PHI node and the current PHI argument. */ | |
991 | if (e->flags & EDGE_EXECUTABLE) | |
992 | { | |
726a989a | 993 | tree arg = gimple_phi_arg (phi, i)->def; |
0b4b14ac | 994 | prop_value_t arg_val = get_value_for_expr (arg, false); |
6de9cd9a | 995 | |
0bca51f0 | 996 | ccp_lattice_meet (&new_val, &arg_val); |
6de9cd9a | 997 | |
750628d8 DN |
998 | if (dump_file && (dump_flags & TDF_DETAILS)) |
999 | { | |
1000 | fprintf (dump_file, "\t"); | |
0bca51f0 DN |
1001 | print_generic_expr (dump_file, arg, dump_flags); |
1002 | dump_lattice_value (dump_file, "\tValue: ", arg_val); | |
750628d8 DN |
1003 | fprintf (dump_file, "\n"); |
1004 | } | |
6de9cd9a | 1005 | |
750628d8 DN |
1006 | if (new_val.lattice_val == VARYING) |
1007 | break; | |
1008 | } | |
1009 | } | |
6de9cd9a DN |
1010 | |
1011 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
750628d8 DN |
1012 | { |
1013 | dump_lattice_value (dump_file, "\n PHI node value: ", new_val); | |
1014 | fprintf (dump_file, "\n\n"); | |
1015 | } | |
1016 | ||
106dec71 | 1017 | /* Make the transition to the new value. */ |
726a989a | 1018 | if (set_lattice_value (gimple_phi_result (phi), new_val)) |
750628d8 DN |
1019 | { |
1020 | if (new_val.lattice_val == VARYING) | |
1021 | return SSA_PROP_VARYING; | |
1022 | else | |
1023 | return SSA_PROP_INTERESTING; | |
1024 | } | |
1025 | else | |
1026 | return SSA_PROP_NOT_INTERESTING; | |
6de9cd9a DN |
1027 | } |
1028 | ||
84d77ca6 | 1029 | /* Return the constant value for OP, or OP itself otherwise. */
0354c0c7 BS |
1030 | |
1031 | static tree | |
84d77ca6 | 1032 | valueize_op (tree op) |
0354c0c7 | 1033 | { |
0354c0c7 BS |
1034 | if (TREE_CODE (op) == SSA_NAME) |
1035 | { | |
84d77ca6 RG |
1036 | tree tem = get_constant_value (op); |
1037 | if (tem) | |
1038 | return tem; | |
0354c0c7 BS |
1039 | } |
1040 | return op; | |
1041 | } | |
1042 | ||
750628d8 DN |
1043 | /* CCP specific front-end to the non-destructive constant folding |
1044 | routines. | |
6de9cd9a DN |
1045 | |
1046 | Attempt to simplify the RHS of STMT knowing that one or more | |
1047 | operands are constants. | |
1048 | ||
1049 | If simplification is possible, return the simplified RHS, | |
726a989a | 1050 | otherwise return the original RHS or NULL_TREE. */ |
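/* For example (illustration only): if x_1 is known CONSTANT 3, the
   assignment "y_2 = x_1 * 4" folds here to 12, and the conditional
   "if (x_1 > 0)" folds to boolean true, which lets the propagator mark
   the false edge not executable.  */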
6de9cd9a DN |
1051 | |
1052 | static tree | |
726a989a | 1053 | ccp_fold (gimple stmt) |
6de9cd9a | 1054 | { |
db3927fb | 1055 | location_t loc = gimple_location (stmt); |
726a989a | 1056 | switch (gimple_code (stmt)) |
0bca51f0 | 1057 | { |
726a989a RB |
1058 | case GIMPLE_ASSIGN: |
1059 | { | |
1060 | enum tree_code subcode = gimple_assign_rhs_code (stmt); | |
1061 | ||
1062 | switch (get_gimple_rhs_class (subcode)) | |
1063 | { | |
1064 | case GIMPLE_SINGLE_RHS: | |
1065 | { | |
1066 | tree rhs = gimple_assign_rhs1 (stmt); | |
1067 | enum tree_code_class kind = TREE_CODE_CLASS (subcode); | |
1068 | ||
1069 | if (TREE_CODE (rhs) == SSA_NAME) | |
1070 | { | |
1071 | /* If the RHS is an SSA_NAME, return its known constant value, | |
1072 | if any. */ | |
84d77ca6 | 1073 | return get_constant_value (rhs); |
726a989a RB |
1074 | } |
1075 | /* Handle propagating invariant addresses into address operations. | |
1076 | The folding we do here matches that in tree-ssa-forwprop.c. */ | |
1077 | else if (TREE_CODE (rhs) == ADDR_EXPR) | |
1078 | { | |
1079 | tree *base; | |
1080 | base = &TREE_OPERAND (rhs, 0); | |
1081 | while (handled_component_p (*base)) | |
1082 | base = &TREE_OPERAND (*base, 0); | |
70f34814 | 1083 | if (TREE_CODE (*base) == MEM_REF |
726a989a RB |
1084 | && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME) |
1085 | { | |
84d77ca6 RG |
1086 | tree val = get_constant_value (TREE_OPERAND (*base, 0)); |
1087 | if (val | |
1088 | && TREE_CODE (val) == ADDR_EXPR) | |
726a989a | 1089 | { |
70f34814 RG |
1090 | tree ret, save = *base; |
1091 | tree new_base; | |
1092 | new_base = fold_build2 (MEM_REF, TREE_TYPE (*base), | |
84d77ca6 | 1093 | unshare_expr (val), |
70f34814 | 1094 | TREE_OPERAND (*base, 1)); |
726a989a RB |
1095 | /* We need to return a new tree, not modify the IL |
1096 | or share parts of it. So play some tricks to | |
1097 | avoid manually building it. */ | |
70f34814 | 1098 | *base = new_base; |
726a989a RB |
1099 | ret = unshare_expr (rhs); |
1100 | recompute_tree_invariant_for_addr_expr (ret); | |
1101 | *base = save; | |
1102 | return ret; | |
1103 | } | |
1104 | } | |
1105 | } | |
23977d3c RG |
1106 | else if (TREE_CODE (rhs) == CONSTRUCTOR |
1107 | && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE | |
1108 | && (CONSTRUCTOR_NELTS (rhs) | |
1109 | == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)))) | |
1110 | { | |
1111 | unsigned i; | |
1112 | tree val, list; | |
1113 | ||
1114 | list = NULL_TREE; | |
1115 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val) | |
1116 | { | |
84d77ca6 | 1117 | val = valueize_op (val); |
23977d3c RG |
1118 | if (TREE_CODE (val) == INTEGER_CST |
1119 | || TREE_CODE (val) == REAL_CST | |
1120 | || TREE_CODE (val) == FIXED_CST) | |
1121 | list = tree_cons (NULL_TREE, val, list); | |
1122 | else | |
1123 | return NULL_TREE; | |
1124 | } | |
1125 | ||
1126 | return build_vector (TREE_TYPE (rhs), nreverse (list)); | |
1127 | } | |
6de9cd9a | 1128 | |
726a989a | 1129 | if (kind == tcc_reference) |
c76a1f18 | 1130 | { |
4bad83f5 RG |
1131 | if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR |
1132 | || TREE_CODE (rhs) == REALPART_EXPR | |
1133 | || TREE_CODE (rhs) == IMAGPART_EXPR) | |
c76a1f18 RG |
1134 | && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) |
1135 | { | |
84d77ca6 RG |
1136 | tree val = get_constant_value (TREE_OPERAND (rhs, 0)); |
1137 | if (val) | |
db3927fb | 1138 | return fold_unary_loc (EXPR_LOCATION (rhs), |
84d77ca6 RG |
1139 | TREE_CODE (rhs), |
1140 | TREE_TYPE (rhs), val); | |
c76a1f18 | 1141 | } |
70f34814 | 1142 | else if (TREE_CODE (rhs) == MEM_REF |
e8114fba RG |
1143 | && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) |
1144 | { | |
84d77ca6 RG |
1145 | tree val = get_constant_value (TREE_OPERAND (rhs, 0)); |
1146 | if (val | |
1147 | && TREE_CODE (val) == ADDR_EXPR) | |
70f34814 RG |
1148 | { |
1149 | tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs), | |
84d77ca6 | 1150 | unshare_expr (val), |
70f34814 RG |
1151 | TREE_OPERAND (rhs, 1)); |
1152 | if (tem) | |
1153 | rhs = tem; | |
1154 | } | |
e8114fba | 1155 | } |
c76a1f18 RG |
1156 | return fold_const_aggregate_ref (rhs); |
1157 | } | |
726a989a RB |
1158 | else if (kind == tcc_declaration) |
1159 | return get_symbol_constant_value (rhs); | |
1160 | return rhs; | |
1161 | } | |
b8698a0f | 1162 | |
726a989a RB |
1163 | case GIMPLE_UNARY_RHS: |
1164 | { | |
1165 | /* Handle unary operators that can appear in GIMPLE form. | |
1166 | Note that we know the single operand must be a constant, | |
1167 | so this should almost always return a simplified RHS. */ | |
1168 | tree lhs = gimple_assign_lhs (stmt); | |
84d77ca6 | 1169 | tree op0 = valueize_op (gimple_assign_rhs1 (stmt)); |
726a989a RB |
1170 | |
1171 | /* Conversions are useless for CCP purposes if they are | |
1172 | value-preserving. Thus the restrictions that | |
1173 | useless_type_conversion_p places for pointer type conversions | |
1174 | do not apply here. Substitution later will only substitute to | |
1175 | allowed places. */ | |
1a87cf0c | 1176 | if (CONVERT_EXPR_CODE_P (subcode) |
99f536cc | 1177 | && POINTER_TYPE_P (TREE_TYPE (lhs)) |
70f34814 | 1178 | && POINTER_TYPE_P (TREE_TYPE (op0))) |
99f536cc RG |
1179 | { |
1180 | tree tem; | |
70f34814 | 1181 | /* Try to re-construct array references on-the-fly. */ |
99f536cc RG |
1182 | if (!useless_type_conversion_p (TREE_TYPE (lhs), |
1183 | TREE_TYPE (op0)) | |
1184 | && ((tem = maybe_fold_offset_to_address | |
db3927fb | 1185 | (loc, |
c2255bc4 | 1186 | op0, integer_zero_node, TREE_TYPE (lhs))) |
99f536cc RG |
1187 | != NULL_TREE)) |
1188 | return tem; | |
1189 | return op0; | |
1190 | } | |
726a989a | 1191 | |
b8698a0f | 1192 | return |
db3927fb AH |
1193 | fold_unary_ignore_overflow_loc (loc, subcode, |
1194 | gimple_expr_type (stmt), op0); | |
ff8b183b | 1195 | } |
726a989a RB |
1196 | |
1197 | case GIMPLE_BINARY_RHS: | |
1198 | { | |
1199 | /* Handle binary operators that can appear in GIMPLE form. */ | |
84d77ca6 RG |
1200 | tree op0 = valueize_op (gimple_assign_rhs1 (stmt)); |
1201 | tree op1 = valueize_op (gimple_assign_rhs2 (stmt)); | |
726a989a | 1202 | |
70f34814 RG |
1203 | /* Translate &x + CST into an invariant form suitable for |
1204 | further propagation. */ | |
99f536cc RG |
1205 | if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR |
1206 | && TREE_CODE (op0) == ADDR_EXPR | |
1207 | && TREE_CODE (op1) == INTEGER_CST) | |
1208 | { | |
70f34814 RG |
1209 | tree off = fold_convert (ptr_type_node, op1); |
1210 | return build_fold_addr_expr | |
1211 | (fold_build2 (MEM_REF, | |
1212 | TREE_TYPE (TREE_TYPE (op0)), | |
1213 | unshare_expr (op0), off)); | |
99f536cc RG |
1214 | } |
1215 | ||
db3927fb | 1216 | return fold_binary_loc (loc, subcode, |
70f34814 | 1217 | gimple_expr_type (stmt), op0, op1); |
726a989a RB |
1218 | } |
1219 | ||
0354c0c7 BS |
1220 | case GIMPLE_TERNARY_RHS: |
1221 | { | |
84d77ca6 RG |
1222 | /* Handle ternary operators that can appear in GIMPLE form. */ |
1223 | tree op0 = valueize_op (gimple_assign_rhs1 (stmt)); | |
1224 | tree op1 = valueize_op (gimple_assign_rhs2 (stmt)); | |
1225 | tree op2 = valueize_op (gimple_assign_rhs3 (stmt)); | |
0354c0c7 BS |
1226 | |
1227 | return fold_ternary_loc (loc, subcode, | |
1228 | gimple_expr_type (stmt), op0, op1, op2); | |
1229 | } | |
1230 | ||
726a989a RB |
1231 | default: |
1232 | gcc_unreachable (); | |
1233 | } | |
1234 | } | |
1235 | break; | |
6de9cd9a | 1236 | |
726a989a | 1237 | case GIMPLE_CALL: |
174ef36d | 1238 | { |
84d77ca6 | 1239 | tree fn = valueize_op (gimple_call_fn (stmt)); |
174ef36d | 1240 | if (TREE_CODE (fn) == ADDR_EXPR |
a135b1c4 | 1241 | && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL |
174ef36d RG |
1242 | && DECL_BUILT_IN (TREE_OPERAND (fn, 0))) |
1243 | { | |
1244 | tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt)); | |
1245 | tree call, retval; | |
1246 | unsigned i; | |
1247 | for (i = 0; i < gimple_call_num_args (stmt); ++i) | |
84d77ca6 | 1248 | args[i] = valueize_op (gimple_call_arg (stmt, i)); |
db3927fb AH |
1249 | call = build_call_array_loc (loc, |
1250 | gimple_call_return_type (stmt), | |
1251 | fn, gimple_call_num_args (stmt), args); | |
1252 | retval = fold_call_expr (EXPR_LOCATION (call), call, false); | |
174ef36d RG |
1253 | if (retval) |
1254 | /* fold_call_expr wraps the result inside a NOP_EXPR. */ | |
1255 | STRIP_NOPS (retval); | |
1256 | return retval; | |
1257 | } | |
1258 | return NULL_TREE; | |
1259 | } | |
6de9cd9a | 1260 | |
726a989a RB |
1261 | case GIMPLE_COND: |
1262 | { | |
1263 | /* Handle comparison operators that can appear in GIMPLE form. */ | |
84d77ca6 RG |
1264 | tree op0 = valueize_op (gimple_cond_lhs (stmt)); |
1265 | tree op1 = valueize_op (gimple_cond_rhs (stmt)); | |
726a989a | 1266 | enum tree_code code = gimple_cond_code (stmt); |
db3927fb | 1267 | return fold_binary_loc (loc, code, boolean_type_node, op0, op1); |
726a989a | 1268 | } |
6de9cd9a | 1269 | |
726a989a RB |
1270 | case GIMPLE_SWITCH: |
1271 | { | |
84d77ca6 RG |
1272 | /* Return the constant switch index. */ |
1273 | return valueize_op (gimple_switch_index (stmt)); | |
726a989a | 1274 | } |
00d382a8 | 1275 | |
726a989a RB |
1276 | default: |
1277 | gcc_unreachable (); | |
6de9cd9a | 1278 | } |
6de9cd9a DN |
1279 | } |
1280 | ||
ae3df618 SB |
1281 | /* Return the tree representing the element referenced by T if T is an |
1282 | ARRAY_REF or COMPONENT_REF into constant aggregates. Return | |
1283 | NULL_TREE otherwise. */ | |
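/* For example (illustration only; the declaration is made up): given

     static const int tbl[3] = { 10, 20, 30 };

   a read of tbl[1] is an ARRAY_REF whose base is a read-only, locally
   bound VAR_DECL with a static initializer, so it folds to 20 here; a
   non-constant index makes the function return NULL_TREE.  */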
1284 | ||
ed97ddc6 | 1285 | tree |
ae3df618 SB |
1286 | fold_const_aggregate_ref (tree t) |
1287 | { | |
4038c495 GB |
1288 | tree base, ctor, idx, field; |
1289 | unsigned HOST_WIDE_INT cnt; | |
1290 | tree cfield, cval; | |
84d77ca6 | 1291 | tree tem; |
ae3df618 | 1292 | |
e8114fba RG |
1293 | if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration) |
1294 | return get_symbol_constant_value (t); | |
1295 | ||
ae3df618 SB |
1296 | switch (TREE_CODE (t)) |
1297 | { | |
1298 | case ARRAY_REF: | |
1299 | /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its | |
1300 | DECL_INITIAL. If BASE is a nested reference into another | |
1301 | ARRAY_REF or COMPONENT_REF, make a recursive call to resolve | |
1302 | the inner reference. */ | |
1303 | base = TREE_OPERAND (t, 0); | |
1304 | switch (TREE_CODE (base)) | |
1305 | { | |
2bd6311d RG |
1306 | case MEM_REF: |
1307 | /* ??? We could handle this case. */ | |
1308 | if (!integer_zerop (TREE_OPERAND (base, 1))) | |
1309 | return NULL_TREE; | |
1310 | base = get_base_address (base); | |
1311 | if (!base | |
1312 | || TREE_CODE (base) != VAR_DECL) | |
1313 | return NULL_TREE; | |
1314 | ||
1315 | /* Fallthru. */ | |
ae3df618 SB |
1316 | case VAR_DECL: |
1317 | if (!TREE_READONLY (base) | |
1318 | || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE | |
1319 | || !targetm.binds_local_p (base)) | |
1320 | return NULL_TREE; | |
1321 | ||
1322 | ctor = DECL_INITIAL (base); | |
1323 | break; | |
1324 | ||
1325 | case ARRAY_REF: | |
1326 | case COMPONENT_REF: | |
1327 | ctor = fold_const_aggregate_ref (base); | |
1328 | break; | |
1329 | ||
87e1e42b RG |
1330 | case STRING_CST: |
1331 | case CONSTRUCTOR: | |
1332 | ctor = base; | |
1333 | break; | |
1334 | ||
ae3df618 SB |
1335 | default: |
1336 | return NULL_TREE; | |
1337 | } | |
1338 | ||
1339 | if (ctor == NULL_TREE | |
faaf1436 RG |
1340 | || (TREE_CODE (ctor) != CONSTRUCTOR |
1341 | && TREE_CODE (ctor) != STRING_CST) | |
ae3df618 SB |
1342 | || !TREE_STATIC (ctor)) |
1343 | return NULL_TREE; | |
1344 | ||
1345 | /* Get the index. If we have an SSA_NAME, try to resolve it | |
1346 | with the current lattice value for the SSA_NAME. */ | |
1347 | idx = TREE_OPERAND (t, 1); | |
1348 | switch (TREE_CODE (idx)) | |
1349 | { | |
1350 | case SSA_NAME: | |
84d77ca6 RG |
1351 | if ((tem = get_constant_value (idx)) |
1352 | && TREE_CODE (tem) == INTEGER_CST) | |
1353 | idx = tem; | |
ae3df618 SB |
1354 | else |
1355 | return NULL_TREE; | |
1356 | break; | |
1357 | ||
1358 | case INTEGER_CST: | |
1359 | break; | |
1360 | ||
1361 | default: | |
1362 | return NULL_TREE; | |
1363 | } | |
1364 | ||
faaf1436 RG |
1365 | /* Fold read from constant string. */ |
1366 | if (TREE_CODE (ctor) == STRING_CST) | |
1367 | { | |
1368 | if ((TYPE_MODE (TREE_TYPE (t)) | |
1369 | == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) | |
1370 | && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) | |
1371 | == MODE_INT) | |
1372 | && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1 | |
1373 | && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0) | |
0c4d4efb DJ |
1374 | return build_int_cst_type (TREE_TYPE (t), |
1375 | (TREE_STRING_POINTER (ctor) | |
1376 | [TREE_INT_CST_LOW (idx)])); | |
faaf1436 RG |
1377 | return NULL_TREE; |
1378 | } | |
1379 | ||
ae3df618 | 1380 | /* Whoo-hoo! I'll fold ya baby. Yeah! */ |
4038c495 GB |
1381 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval) |
1382 | if (tree_int_cst_equal (cfield, idx)) | |
62bc00e2 | 1383 | { |
5c95f07b | 1384 | STRIP_NOPS (cval); |
75ccc1e7 RG |
1385 | if (TREE_CODE (cval) == ADDR_EXPR) |
1386 | { | |
1387 | tree base = get_base_address (TREE_OPERAND (cval, 0)); | |
1388 | if (base && TREE_CODE (base) == VAR_DECL) | |
1389 | add_referenced_var (base); | |
1390 | } | |
62bc00e2 RG |
1391 | return cval; |
1392 | } | |
ae3df618 SB |
1393 | break; |
1394 | ||
1395 | case COMPONENT_REF: | |
1396 | /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its | |
1397 | DECL_INITIAL. If BASE is a nested reference into another | |
1398 | ARRAY_REF or COMPONENT_REF, make a recursive call to resolve | |
1399 | the inner reference. */ | |
1400 | base = TREE_OPERAND (t, 0); | |
1401 | switch (TREE_CODE (base)) | |
1402 | { | |
1403 | case VAR_DECL: | |
1404 | if (!TREE_READONLY (base) | |
1405 | || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE | |
1406 | || !targetm.binds_local_p (base)) | |
1407 | return NULL_TREE; | |
1408 | ||
1409 | ctor = DECL_INITIAL (base); | |
1410 | break; | |
1411 | ||
1412 | case ARRAY_REF: | |
1413 | case COMPONENT_REF: | |
1414 | ctor = fold_const_aggregate_ref (base); | |
1415 | break; | |
1416 | ||
1417 | default: | |
1418 | return NULL_TREE; | |
1419 | } | |
1420 | ||
1421 | if (ctor == NULL_TREE | |
1422 | || TREE_CODE (ctor) != CONSTRUCTOR | |
1423 | || !TREE_STATIC (ctor)) | |
1424 | return NULL_TREE; | |
1425 | ||
1426 | field = TREE_OPERAND (t, 1); | |
1427 | ||
4038c495 GB |
1428 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval) |
1429 | if (cfield == field | |
ae3df618 | 1430 | /* FIXME: Handle bit-fields. */ |
4038c495 | 1431 | && ! DECL_BIT_FIELD (cfield)) |
62bc00e2 | 1432 | { |
5c95f07b | 1433 | STRIP_NOPS (cval); |
75ccc1e7 RG |
1434 | if (TREE_CODE (cval) == ADDR_EXPR) |
1435 | { | |
1436 | tree base = get_base_address (TREE_OPERAND (cval, 0)); | |
1437 | if (base && TREE_CODE (base) == VAR_DECL) | |
1438 | add_referenced_var (base); | |
1439 | } | |
62bc00e2 RG |
1440 | return cval; |
1441 | } | |
ae3df618 SB |
1442 | break; |
1443 | ||
1ebd8d9a SB |
1444 | case REALPART_EXPR: |
1445 | case IMAGPART_EXPR: | |
1446 | { | |
1447 | tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0)); | |
1448 | if (c && TREE_CODE (c) == COMPLEX_CST) | |
db3927fb AH |
1449 | return fold_build1_loc (EXPR_LOCATION (t), |
1450 | TREE_CODE (t), TREE_TYPE (t), c); | |
1ebd8d9a SB |
1451 | break; |
1452 | } | |
87e1e42b | 1453 | |
70f34814 RG |
1454 | case MEM_REF: |
1455 | /* Get the base object we are accessing. */ | |
1456 | base = TREE_OPERAND (t, 0); | |
1457 | if (TREE_CODE (base) == SSA_NAME | |
84d77ca6 RG |
1458 | && (tem = get_constant_value (base))) |
1459 | base = tem; | |
70f34814 RG |
1460 | if (TREE_CODE (base) != ADDR_EXPR) |
1461 | return NULL_TREE; | |
1462 | base = TREE_OPERAND (base, 0); | |
1463 | switch (TREE_CODE (base)) | |
1464 | { | |
1465 | case VAR_DECL: | |
1466 | if (DECL_P (base) | |
1467 | && !AGGREGATE_TYPE_P (TREE_TYPE (base)) | |
1468 | && integer_zerop (TREE_OPERAND (t, 1))) | |
3fa39831 RG |
1469 | { |
1470 | tree res = get_symbol_constant_value (base); | |
1471 | if (res | |
1472 | && !useless_type_conversion_p | |
1473 | (TREE_TYPE (t), TREE_TYPE (res))) | |
1474 | res = fold_unary (VIEW_CONVERT_EXPR, TREE_TYPE (t), res); | |
1475 | return res; | |
1476 | } | |
70f34814 RG |
1477 | |
1478 | if (!TREE_READONLY (base) | |
1479 | || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE | |
1480 | || !targetm.binds_local_p (base)) | |
1481 | return NULL_TREE; | |
1482 | ||
1483 | ctor = DECL_INITIAL (base); | |
1484 | break; | |
1485 | ||
1486 | case STRING_CST: | |
1487 | case CONSTRUCTOR: | |
1488 | ctor = base; | |
1489 | break; | |
1490 | ||
1491 | default: | |
1492 | return NULL_TREE; | |
1493 | } | |
1494 | ||
1495 | if (ctor == NULL_TREE | |
1496 | || (TREE_CODE (ctor) != CONSTRUCTOR | |
1497 | && TREE_CODE (ctor) != STRING_CST) | |
1498 | || !TREE_STATIC (ctor)) | |
1499 | return NULL_TREE; | |
1500 | ||
1501 | /* Get the byte offset. */ | |
1502 | idx = TREE_OPERAND (t, 1); | |
1503 | ||
1504 | /* Fold read from constant string. */ | |
1505 | if (TREE_CODE (ctor) == STRING_CST) | |
1506 | { | |
1507 | if ((TYPE_MODE (TREE_TYPE (t)) | |
1508 | == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) | |
1509 | && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) | |
1510 | == MODE_INT) | |
1511 | && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1 | |
1512 | && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0) | |
1513 | return build_int_cst_type (TREE_TYPE (t), | |
1514 | (TREE_STRING_POINTER (ctor) | |
1515 | [TREE_INT_CST_LOW (idx)])); | |
1516 | return NULL_TREE; | |
1517 | } | |
1518 | ||
1519 | /* ??? Implement byte-offset indexing into a non-array CONSTRUCTOR. */ | |
1520 | if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE | |
1521 | && (TYPE_MODE (TREE_TYPE (t)) | |
1522 | == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) | |
1523 | && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0 | |
1524 | && integer_zerop | |
1525 | (int_const_binop | |
1526 | (TRUNC_MOD_EXPR, idx, | |
1527 | size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0))) | |
1528 | { | |
1529 | idx = int_const_binop (TRUNC_DIV_EXPR, idx, | |
1530 | size_int (GET_MODE_SIZE | |
1531 | (TYPE_MODE (TREE_TYPE (t)))), 0); | |
1532 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval) | |
1533 | if (tree_int_cst_equal (cfield, idx)) | |
1534 | { | |
1535 | STRIP_NOPS (cval); | |
1536 | if (TREE_CODE (cval) == ADDR_EXPR) | |
1537 | { | |
1538 | tree base = get_base_address (TREE_OPERAND (cval, 0)); | |
1539 | if (base && TREE_CODE (base) == VAR_DECL) | |
1540 | add_referenced_var (base); | |
1541 | } | |
1542 | if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval))) | |
1543 | return cval; | |
1544 | else if (CONSTANT_CLASS_P (cval)) | |
1545 | return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval); | |
1546 | else | |
1547 | return NULL_TREE; | |
1548 | } | |
1549 | } | |
1550 | break; | |
87e1e42b | 1551 | |
ae3df618 SB |
1552 | default: |
1553 | break; | |
1554 | } | |
1555 | ||
1556 | return NULL_TREE; | |
1557 | } | |
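/* As a sketch of what the routine above handles (names illustrative only):
   for a file-scope

     static const int tbl[2] = { 10, 20 };
     ... = tbl[1];

   the ARRAY_REF case looks up index 1 in the CONSTRUCTOR recorded in
   DECL_INITIAL (tbl) and returns the INTEGER_CST 20, provided the array
   is read-only, static and binds locally.  The COMPONENT_REF and MEM_REF
   cases follow the same pattern for structure fields and byte-offset
   accesses.  */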
726a989a | 1558 | |
0b4b14ac RG |
1559 | /* Apply the operation CODE in type TYPE to the value, mask pair |
1560 | RVAL and RMASK representing a value of type RTYPE and set | |
1561 | the value, mask pair *VAL and *MASK to the result. */ | |
1562 | ||
1563 | static void | |
1564 | bit_value_unop_1 (enum tree_code code, tree type, | |
1565 | double_int *val, double_int *mask, | |
1566 | tree rtype, double_int rval, double_int rmask) | |
1567 | { | |
1568 | switch (code) | |
1569 | { | |
1570 | case BIT_NOT_EXPR: | |
1571 | *mask = rmask; | |
1572 | *val = double_int_not (rval); | |
1573 | break; | |
1574 | ||
1575 | case NEGATE_EXPR: | |
1576 | { | |
1577 | double_int temv, temm; | |
1578 | /* Return ~rval + 1. */ | |
1579 | bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask); | |
1580 | bit_value_binop_1 (PLUS_EXPR, type, val, mask, | |
1581 | type, temv, temm, | |
1582 | type, double_int_one, double_int_zero); | |
1583 | break; | |
1584 | } | |
1585 | ||
1586 | CASE_CONVERT: | |
1587 | { | |
1588 | bool uns; | |
1589 | ||
1590 | /* First extend mask and value according to the original type. */ | |
1591 | uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype) | |
1592 | ? 0 : TYPE_UNSIGNED (rtype)); | |
1593 | *mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns); | |
1594 | *val = double_int_ext (rval, TYPE_PRECISION (rtype), uns); | |
1595 | ||
1596 | /* Then extend mask and value according to the target type. */ | |
1597 | uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type) | |
1598 | ? 0 : TYPE_UNSIGNED (type)); | |
1599 | *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns); | |
1600 | *val = double_int_ext (*val, TYPE_PRECISION (type), uns); | |
1601 | break; | |
1602 | } | |
1603 | ||
1604 | default: | |
1605 | *mask = double_int_minus_one; | |
1606 | break; | |
1607 | } | |
1608 | } | |
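/* Worked example for the BIT_NOT_EXPR case above (bit patterns chosen
   arbitrarily): with rval = 0b00000100 and rmask = 0b00000011, i.e. the
   low two bits unknown, the result is *mask = 0b00000011 and
   *val = ~0b00000100; unknown bits stay unknown and every known bit is
   simply inverted.  */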
1609 | ||
1610 | /* Apply the operation CODE in type TYPE to the value, mask pairs | |
1611 | R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE | |
1612 | and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */ | |
1613 | ||
1614 | static void | |
1615 | bit_value_binop_1 (enum tree_code code, tree type, | |
1616 | double_int *val, double_int *mask, | |
1617 | tree r1type, double_int r1val, double_int r1mask, | |
1618 | tree r2type, double_int r2val, double_int r2mask) | |
1619 | { | |
1620 | bool uns = (TREE_CODE (type) == INTEGER_TYPE | |
1621 | && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type)); | |
1622 | /* Assume we'll get a constant result. Use an initial varying value; | |
1623 | we fall back to varying in the end if necessary. */ | |
1624 | *mask = double_int_minus_one; | |
1625 | switch (code) | |
1626 | { | |
1627 | case BIT_AND_EXPR: | |
1628 | /* The mask is constant where there is a known not | |
1629 | set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */ | |
1630 | *mask = double_int_and (double_int_ior (r1mask, r2mask), | |
1631 | double_int_and (double_int_ior (r1val, r1mask), | |
1632 | double_int_ior (r2val, r2mask))); | |
1633 | *val = double_int_and (r1val, r2val); | |
1634 | break; | |
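      /* Worked example (arbitrary bit patterns): r1 = (val 0b1100,
	 mask 0b0011) and a constant r2 = 0b1010 give
	 *mask = 0b0011 & (0b1111 & 0b1010) = 0b0010 and *val = 0b1000;
	 bit 1 remains unknown because r2 has a 1 there, while bit 0 is
	 known zero because r2 masks it out.  */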
1635 | ||
1636 | case BIT_IOR_EXPR: | |
1637 | /* The mask is constant where there is a known | |
1638 | set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */ | |
1639 | *mask = double_int_and_not | |
1640 | (double_int_ior (r1mask, r2mask), | |
1641 | double_int_ior (double_int_and_not (r1val, r1mask), | |
1642 | double_int_and_not (r2val, r2mask))); | |
1643 | *val = double_int_ior (r1val, r2val); | |
1644 | break; | |
1645 | ||
1646 | case BIT_XOR_EXPR: | |
1647 | /* m1 | m2 */ | |
1648 | *mask = double_int_ior (r1mask, r2mask); | |
1649 | *val = double_int_xor (r1val, r2val); | |
1650 | break; | |
1651 | ||
1652 | case LROTATE_EXPR: | |
1653 | case RROTATE_EXPR: | |
1654 | if (double_int_zero_p (r2mask)) | |
1655 | { | |
1656 | HOST_WIDE_INT shift = r2val.low; | |
1657 | if (code == RROTATE_EXPR) | |
1658 | shift = -shift; | |
1659 | *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type)); | |
1660 | *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type)); | |
1661 | } | |
1662 | break; | |
1663 | ||
1664 | case LSHIFT_EXPR: | |
1665 | case RSHIFT_EXPR: | |
1666 | /* ??? We can handle partially known shift counts if we know | |
1667 | their sign. That way we can tell that (x << (y | 8)) & 255 | |
1668 | is zero. */ | |
1669 | if (double_int_zero_p (r2mask)) | |
1670 | { | |
1671 | HOST_WIDE_INT shift = r2val.low; | |
1672 | if (code == RSHIFT_EXPR) | |
1673 | shift = -shift; | |
1674 | /* We need to know if we are doing a left or a right shift | |
1675 | to properly shift in zeros for left shift and unsigned | |
1676 | right shifts and the sign bit for signed right shifts. | |
1677 | For signed right shifts we shift in varying in case | |
1678 | the sign bit was varying. */ | |
1679 | if (shift > 0) | |
1680 | { | |
1681 | *mask = double_int_lshift (r1mask, shift, | |
1682 | TYPE_PRECISION (type), false); | |
1683 | *val = double_int_lshift (r1val, shift, | |
1684 | TYPE_PRECISION (type), false); | |
1685 | } | |
1686 | else if (shift < 0) | |
1687 | { | |
1688 | shift = -shift; | |
1689 | *mask = double_int_rshift (r1mask, shift, | |
1690 | TYPE_PRECISION (type), !uns); | |
1691 | *val = double_int_rshift (r1val, shift, | |
1692 | TYPE_PRECISION (type), !uns); | |
1693 | } | |
1694 | else | |
1695 | { | |
1696 | *mask = r1mask; | |
1697 | *val = r1val; | |
1698 | } | |
1699 | } | |
1700 | break; | |
1701 | ||
1702 | case PLUS_EXPR: | |
1703 | case POINTER_PLUS_EXPR: | |
1704 | { | |
1705 | double_int lo, hi; | |
1706 | /* Do the addition with unknown bits set to zero, to give carry-ins of | |
1707 | zero wherever possible. */ | |
1708 | lo = double_int_add (double_int_and_not (r1val, r1mask), | |
1709 | double_int_and_not (r2val, r2mask)); | |
1710 | lo = double_int_ext (lo, TYPE_PRECISION (type), uns); | |
1711 | /* Do the addition with unknown bits set to one, to give carry-ins of | |
1712 | one wherever possible. */ | |
1713 | hi = double_int_add (double_int_ior (r1val, r1mask), | |
1714 | double_int_ior (r2val, r2mask)); | |
1715 | hi = double_int_ext (hi, TYPE_PRECISION (type), uns); | |
1716 | /* Each bit in the result is known if (a) the corresponding bits in | |
1717 | both inputs are known, and (b) the carry-in to that bit position | |
1718 | is known. We can check condition (b) by seeing if we got the same | |
1719 | result with minimised carries as with maximised carries. */ | |
1720 | *mask = double_int_ior (double_int_ior (r1mask, r2mask), | |
1721 | double_int_xor (lo, hi)); | |
1722 | *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns); | |
1723 | /* It shouldn't matter whether we choose lo or hi here. */ | |
1724 | *val = lo; | |
1725 | break; | |
1726 | } | |
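      /* Worked example (arbitrary bit patterns): r1 = (val 0b1000,
	 mask 0b0011) plus a constant r2 = 0b0100 gives lo = 0b1100 and
	 hi = 0b1011 + 0b0100 = 0b1111, so *mask = 0b0011 | (lo ^ hi)
	 = 0b0011; no carry escapes the unknown bits and the result is
	 known to have the form 0b11xx.  */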
1727 | ||
1728 | case MINUS_EXPR: | |
1729 | { | |
1730 | double_int temv, temm; | |
1731 | bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm, | |
1732 | r2type, r2val, r2mask); | |
1733 | bit_value_binop_1 (PLUS_EXPR, type, val, mask, | |
1734 | r1type, r1val, r1mask, | |
1735 | r2type, temv, temm); | |
1736 | break; | |
1737 | } | |
1738 | ||
1739 | case MULT_EXPR: | |
1740 | { | |
1741 | /* Just track trailing zeros in both operands and transfer | |
1742 | them to the other. */ | |
1743 | int r1tz = double_int_ctz (double_int_ior (r1val, r1mask)); | |
1744 | int r2tz = double_int_ctz (double_int_ior (r2val, r2mask)); | |
1745 | if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT) | |
1746 | { | |
1747 | *mask = double_int_zero; | |
1748 | *val = double_int_zero; | |
1749 | } | |
1750 | else if (r1tz + r2tz > 0) | |
1751 | { | |
1752 | *mask = double_int_not (double_int_mask (r1tz + r2tz)); | |
1753 | *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns); | |
1754 | *val = double_int_zero; | |
1755 | } | |
1756 | break; | |
1757 | } | |
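      /* Example: if r1 is known to have at least two trailing zero bits
	 (r1tz = 2) and r2 at least one (r2tz = 1), the product has at
	 least three trailing zeros, so those bits of *mask are cleared
	 (known) and *val leaves them zero.  */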
1758 | ||
1759 | case EQ_EXPR: | |
1760 | case NE_EXPR: | |
1761 | { | |
1762 | double_int m = double_int_ior (r1mask, r2mask); | |
1763 | if (!double_int_equal_p (double_int_and_not (r1val, m), | |
1764 | double_int_and_not (r2val, m))) | |
1765 | { | |
1766 | *mask = double_int_zero; | |
1767 | *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one); | |
1768 | } | |
1769 | else | |
1770 | { | |
1771 | /* We know the result of a comparison is always one or zero. */ | |
1772 | *mask = double_int_one; | |
1773 | *val = double_int_zero; | |
1774 | } | |
1775 | break; | |
1776 | } | |
1777 | ||
1778 | case GE_EXPR: | |
1779 | case GT_EXPR: | |
1780 | { | |
1781 | double_int tem = r1val; | |
1782 | r1val = r2val; | |
1783 | r2val = tem; | |
1784 | tem = r1mask; | |
1785 | r1mask = r2mask; | |
1786 | r2mask = tem; | |
1787 | code = swap_tree_comparison (code); | |
1788 | } | |
1789 | /* Fallthru. */ | |
1790 | case LT_EXPR: | |
1791 | case LE_EXPR: | |
1792 | { | |
1793 | int minmax, maxmin; | |
1794 | /* If the most significant bits are not known we know nothing. */ | |
1795 | if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask)) | |
1796 | break; | |
1797 | ||
1798 | /* If we know the most significant bits we know the value | |
1799 | ranges by means of treating varying bits as zero | |
1800 | or one. Do a cross comparison of the max/min pairs. */ | |
1801 | maxmin = double_int_cmp (double_int_ior (r1val, r1mask), | |
1802 | double_int_and_not (r2val, r2mask), uns); | |
1803 | minmax = double_int_cmp (double_int_and_not (r1val, r1mask), | |
1804 | double_int_ior (r2val, r2mask), uns); | |
1805 | if (maxmin < 0) /* r1 is less than r2. */ | |
1806 | { | |
1807 | *mask = double_int_zero; | |
1808 | *val = double_int_one; | |
1809 | } | |
1810 | else if (minmax > 0) /* r1 is not less or equal to r2. */ | |
1811 | { | |
1812 | *mask = double_int_zero; | |
1813 | *val = double_int_zero; | |
1814 | } | |
1815 | else if (maxmin == minmax) /* r1 and r2 are equal. */ | |
1816 | { | |
1817 | /* This probably should never happen as we'd have | |
1818 | folded the comparison during fully constant value folding. */ | |
1819 | *mask = double_int_zero; | |
1820 | *val = (code == LE_EXPR ? double_int_one : double_int_zero); | |
1821 | } | |
1822 | else | |
1823 | { | |
1824 | /* We know the result of a comparison is always one or zero. */ | |
1825 | *mask = double_int_one; | |
1826 | *val = double_int_zero; | |
1827 | } | |
1828 | break; | |
1829 | } | |
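      /* Worked example (arbitrary values): r1 = (val 0b0010, mask 0b0001)
	 can only be 2 or 3, so for r1 < 4 the cross comparison gives
	 maxmin = cmp (0b0011, 0b0100) < 0 and the result is the constant
	 1 even though one bit of r1 is unknown.  */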
1830 | ||
1831 | default:; | |
1832 | } | |
1833 | } | |
1834 | ||
1835 | /* Return the propagation value when applying the operation CODE to | |
1836 | the value RHS yielding type TYPE. */ | |
1837 | ||
1838 | static prop_value_t | |
1839 | bit_value_unop (enum tree_code code, tree type, tree rhs) | |
1840 | { | |
1841 | prop_value_t rval = get_value_for_expr (rhs, true); | |
1842 | double_int value, mask; | |
1843 | prop_value_t val; | |
1844 | gcc_assert ((rval.lattice_val == CONSTANT | |
1845 | && TREE_CODE (rval.value) == INTEGER_CST) | |
1846 | || double_int_minus_one_p (rval.mask)); | |
1847 | bit_value_unop_1 (code, type, &value, &mask, | |
1848 | TREE_TYPE (rhs), value_to_double_int (rval), rval.mask); | |
1849 | if (!double_int_minus_one_p (mask)) | |
1850 | { | |
1851 | val.lattice_val = CONSTANT; | |
1852 | val.mask = mask; | |
1853 | /* ??? Delay building trees here. */ | |
1854 | val.value = double_int_to_tree (type, value); | |
1855 | } | |
1856 | else | |
1857 | { | |
1858 | val.lattice_val = VARYING; | |
1859 | val.value = NULL_TREE; | |
1860 | val.mask = double_int_minus_one; | |
1861 | } | |
1862 | return val; | |
1863 | } | |
1864 | ||
1865 | /* Return the propagation value when applying the operation CODE to | |
1866 | the values RHS1 and RHS2 yielding type TYPE. */ | |
1867 | ||
1868 | static prop_value_t | |
1869 | bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2) | |
1870 | { | |
1871 | prop_value_t r1val = get_value_for_expr (rhs1, true); | |
1872 | prop_value_t r2val = get_value_for_expr (rhs2, true); | |
1873 | double_int value, mask; | |
1874 | prop_value_t val; | |
1875 | gcc_assert ((r1val.lattice_val == CONSTANT | |
1876 | && TREE_CODE (r1val.value) == INTEGER_CST) | |
1877 | || double_int_minus_one_p (r1val.mask)); | |
1878 | gcc_assert ((r2val.lattice_val == CONSTANT | |
1879 | && TREE_CODE (r2val.value) == INTEGER_CST) | |
1880 | || double_int_minus_one_p (r2val.mask)); | |
1881 | bit_value_binop_1 (code, type, &value, &mask, | |
1882 | TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask, | |
1883 | TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask); | |
1884 | if (!double_int_minus_one_p (mask)) | |
1885 | { | |
1886 | val.lattice_val = CONSTANT; | |
1887 | val.mask = mask; | |
1888 | /* ??? Delay building trees here. */ | |
1889 | val.value = double_int_to_tree (type, value); | |
1890 | } | |
1891 | else | |
1892 | { | |
1893 | val.lattice_val = VARYING; | |
1894 | val.value = NULL_TREE; | |
1895 | val.mask = double_int_minus_one; | |
1896 | } | |
1897 | return val; | |
1898 | } | |
1899 | ||
726a989a RB |
1900 | /* Evaluate statement STMT. |
1901 | Valid only for assignments, calls, conditionals, and switches. */ | |
6de9cd9a | 1902 | |
0bca51f0 | 1903 | static prop_value_t |
726a989a | 1904 | evaluate_stmt (gimple stmt) |
6de9cd9a | 1905 | { |
0bca51f0 | 1906 | prop_value_t val; |
faaf1436 | 1907 | tree simplified = NULL_TREE; |
0bca51f0 | 1908 | ccp_lattice_t likelyvalue = likely_value (stmt); |
0b4b14ac | 1909 | bool is_constant = false; |
0bca51f0 | 1910 | |
0b4b14ac RG |
1911 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1912 | { | |
1913 | fprintf (dump_file, "which is likely "); | |
1914 | switch (likelyvalue) | |
1915 | { | |
1916 | case CONSTANT: | |
1917 | fprintf (dump_file, "CONSTANT"); | |
1918 | break; | |
1919 | case UNDEFINED: | |
1920 | fprintf (dump_file, "UNDEFINED"); | |
1921 | break; | |
1922 | case VARYING: | |
1923 | fprintf (dump_file, "VARYING"); | |
1924 | break; | |
1925 | default:; | |
1926 | } | |
1927 | fprintf (dump_file, "\n"); | |
1928 | } | |
6ac01510 | 1929 | |
6de9cd9a DN |
1930 | /* If the statement is likely to have a CONSTANT result, then try |
1931 | to fold the statement to determine the constant value. */ | |
726a989a RB |
1932 | /* FIXME. This is the only place that we call ccp_fold. |
1933 | Since likely_value never returns CONSTANT for calls, we will | |
1934 | not attempt to fold them, including builtins that might profit from it. */ | |
6de9cd9a | 1935 | if (likelyvalue == CONSTANT) |
0b4b14ac RG |
1936 | { |
1937 | fold_defer_overflow_warnings (); | |
1938 | simplified = ccp_fold (stmt); | |
1939 | is_constant = simplified && is_gimple_min_invariant (simplified); | |
1940 | fold_undefer_overflow_warnings (is_constant, stmt, 0); | |
1941 | if (is_constant) | |
1942 | { | |
1943 | /* The statement produced a constant value. */ | |
1944 | val.lattice_val = CONSTANT; | |
1945 | val.value = simplified; | |
1946 | val.mask = double_int_zero; | |
1947 | } | |
1948 | } | |
6de9cd9a DN |
1949 | /* If the statement is likely to have a VARYING result, then do not |
1950 | bother folding the statement. */ | |
87e1e42b | 1951 | else if (likelyvalue == VARYING) |
726a989a | 1952 | { |
e0c68ce9 | 1953 | enum gimple_code code = gimple_code (stmt); |
726a989a RB |
1954 | if (code == GIMPLE_ASSIGN) |
1955 | { | |
1956 | enum tree_code subcode = gimple_assign_rhs_code (stmt); | |
b8698a0f | 1957 | |
726a989a RB |
1958 | /* Other cases cannot satisfy is_gimple_min_invariant |
1959 | without folding. */ | |
1960 | if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS) | |
1961 | simplified = gimple_assign_rhs1 (stmt); | |
1962 | } | |
1963 | else if (code == GIMPLE_SWITCH) | |
1964 | simplified = gimple_switch_index (stmt); | |
1965 | else | |
44e10129 MM |
1966 | /* These cannot satisfy is_gimple_min_invariant without folding. */ |
1967 | gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND); | |
0b4b14ac RG |
1968 | is_constant = simplified && is_gimple_min_invariant (simplified); |
1969 | if (is_constant) | |
1970 | { | |
1971 | /* The statement produced a constant value. */ | |
1972 | val.lattice_val = CONSTANT; | |
1973 | val.value = simplified; | |
1974 | val.mask = double_int_zero; | |
1975 | } | |
726a989a | 1976 | } |
6de9cd9a | 1977 | |
0b4b14ac RG |
1978 | /* Resort to simplification for bitwise tracking. */ |
1979 | if (flag_tree_bit_ccp | |
1980 | && likelyvalue == CONSTANT | |
1981 | && !is_constant) | |
00d382a8 | 1982 | { |
0b4b14ac RG |
1983 | enum gimple_code code = gimple_code (stmt); |
1984 | val.lattice_val = VARYING; | |
1985 | val.value = NULL_TREE; | |
1986 | val.mask = double_int_minus_one; | |
1987 | if (code == GIMPLE_ASSIGN) | |
00d382a8 | 1988 | { |
0b4b14ac RG |
1989 | enum tree_code subcode = gimple_assign_rhs_code (stmt); |
1990 | tree rhs1 = gimple_assign_rhs1 (stmt); | |
1991 | switch (get_gimple_rhs_class (subcode)) | |
1992 | { | |
1993 | case GIMPLE_SINGLE_RHS: | |
1994 | if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
1995 | || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
1996 | val = get_value_for_expr (rhs1, true); | |
1997 | break; | |
1998 | ||
1999 | case GIMPLE_UNARY_RHS: | |
2000 | if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
2001 | || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
2002 | && (INTEGRAL_TYPE_P (gimple_expr_type (stmt)) | |
2003 | || POINTER_TYPE_P (gimple_expr_type (stmt)))) | |
2004 | val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1); | |
2005 | break; | |
2006 | ||
2007 | case GIMPLE_BINARY_RHS: | |
2008 | if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
2009 | || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
2010 | { | |
2011 | tree rhs2 = gimple_assign_rhs2 (stmt); | |
2012 | val = bit_value_binop (subcode, | |
2013 | TREE_TYPE (rhs1), rhs1, rhs2); | |
2014 | } | |
2015 | break; | |
2016 | ||
2017 | default:; | |
2018 | } | |
00d382a8 | 2019 | } |
0b4b14ac RG |
2020 | else if (code == GIMPLE_COND) |
2021 | { | |
2022 | enum tree_code code = gimple_cond_code (stmt); | |
2023 | tree rhs1 = gimple_cond_lhs (stmt); | |
2024 | tree rhs2 = gimple_cond_rhs (stmt); | |
2025 | if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
2026 | || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
2027 | val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2); | |
2028 | } | |
2029 | is_constant = (val.lattice_val == CONSTANT); | |
00d382a8 RG |
2030 | } |
2031 | ||
0b4b14ac | 2032 | if (!is_constant) |
6de9cd9a DN |
2033 | { |
2034 | /* The statement produced a nonconstant value. If the statement | |
0bca51f0 DN |
2035 | had UNDEFINED operands, then the result of the statement |
2036 | should be UNDEFINED. Otherwise, the statement is VARYING. */ | |
106dec71 | 2037 | if (likelyvalue == UNDEFINED) |
0b4b14ac RG |
2038 | { |
2039 | val.lattice_val = likelyvalue; | |
2040 | val.mask = double_int_zero; | |
2041 | } | |
a318e3ac | 2042 | else |
0b4b14ac RG |
2043 | { |
2044 | val.lattice_val = VARYING; | |
2045 | val.mask = double_int_minus_one; | |
2046 | } | |
a318e3ac | 2047 | |
0bca51f0 | 2048 | val.value = NULL_TREE; |
6de9cd9a | 2049 | } |
750628d8 DN |
2050 | |
2051 | return val; | |
6de9cd9a DN |
2052 | } |
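/* As an illustration of the bitwise tracking above (example only): for a
   statement x_2 = y_1 & 3 with y_1 otherwise varying, and assuming
   likely_value classifies the statement as likely CONSTANT, bit_value_binop
   yields a CONSTANT lattice value with value 0 and mask 3, i.e. every bit
   above the low two is known to be zero even though the exact value is
   not.  */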
2053 | ||
f61e18ec RG |
2054 | /* Fold the stmt at *GSI with CCP specific information that propagating |
2055 | and regular folding does not catch. */ | |
2056 | ||
2057 | static bool | |
2058 | ccp_fold_stmt (gimple_stmt_iterator *gsi) | |
2059 | { | |
2060 | gimple stmt = gsi_stmt (*gsi); | |
f61e18ec | 2061 | |
830bc550 RG |
2062 | switch (gimple_code (stmt)) |
2063 | { | |
2064 | case GIMPLE_COND: | |
2065 | { | |
2066 | prop_value_t val; | |
2067 | /* Statement evaluation will handle type mismatches in constants | |
2068 | more gracefully than the final propagation. This allows us to | |
2069 | fold more conditionals here. */ | |
2070 | val = evaluate_stmt (stmt); | |
2071 | if (val.lattice_val != CONSTANT | |
0b4b14ac | 2072 | || !double_int_zero_p (val.mask)) |
830bc550 RG |
2073 | return false; |
2074 | ||
0b4b14ac RG |
2075 | if (dump_file) |
2076 | { | |
2077 | fprintf (dump_file, "Folding predicate "); | |
2078 | print_gimple_expr (dump_file, stmt, 0, 0); | |
2079 | fprintf (dump_file, " to "); | |
2080 | print_generic_expr (dump_file, val.value, 0); | |
2081 | fprintf (dump_file, "\n"); | |
2082 | } | |
2083 | ||
830bc550 RG |
2084 | if (integer_zerop (val.value)) |
2085 | gimple_cond_make_false (stmt); | |
2086 | else | |
2087 | gimple_cond_make_true (stmt); | |
f61e18ec | 2088 | |
830bc550 RG |
2089 | return true; |
2090 | } | |
f61e18ec | 2091 | |
830bc550 RG |
2092 | case GIMPLE_CALL: |
2093 | { | |
2094 | tree lhs = gimple_call_lhs (stmt); | |
84d77ca6 | 2095 | tree val; |
830bc550 RG |
2096 | tree argt; |
2097 | bool changed = false; | |
2098 | unsigned i; | |
2099 | ||
2100 | /* If the call was folded into a constant, make sure it goes | |
2101 | away even if we cannot propagate into all uses because of | |
2102 | type issues. */ | |
2103 | if (lhs | |
2104 | && TREE_CODE (lhs) == SSA_NAME | |
84d77ca6 | 2105 | && (val = get_constant_value (lhs))) |
830bc550 | 2106 | { |
84d77ca6 | 2107 | tree new_rhs = unshare_expr (val); |
eb6b98c7 | 2108 | bool res; |
830bc550 RG |
2109 | if (!useless_type_conversion_p (TREE_TYPE (lhs), |
2110 | TREE_TYPE (new_rhs))) | |
2111 | new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs); | |
eb6b98c7 RG |
2112 | res = update_call_from_tree (gsi, new_rhs); |
2113 | gcc_assert (res); | |
830bc550 RG |
2114 | return true; |
2115 | } | |
2116 | ||
2117 | /* Propagate into the call arguments. Compared to replace_uses_in | |
2118 | this can use the argument slot types for type verification | |
2119 | instead of the current argument type. We also can safely | |
2120 | drop qualifiers here as we are dealing with constants anyway. */ | |
2121 | argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt)))); | |
2122 | for (i = 0; i < gimple_call_num_args (stmt) && argt; | |
2123 | ++i, argt = TREE_CHAIN (argt)) | |
2124 | { | |
2125 | tree arg = gimple_call_arg (stmt, i); | |
2126 | if (TREE_CODE (arg) == SSA_NAME | |
84d77ca6 | 2127 | && (val = get_constant_value (arg)) |
830bc550 RG |
2128 | && useless_type_conversion_p |
2129 | (TYPE_MAIN_VARIANT (TREE_VALUE (argt)), | |
84d77ca6 | 2130 | TYPE_MAIN_VARIANT (TREE_TYPE (val)))) |
830bc550 | 2131 | { |
84d77ca6 | 2132 | gimple_call_set_arg (stmt, i, unshare_expr (val)); |
830bc550 RG |
2133 | changed = true; |
2134 | } | |
2135 | } | |
2136 | ||
2137 | return changed; | |
2138 | } | |
f61e18ec | 2139 | |
5c95f07b RG |
2140 | case GIMPLE_ASSIGN: |
2141 | { | |
2142 | tree lhs = gimple_assign_lhs (stmt); | |
84d77ca6 | 2143 | tree val; |
5c95f07b RG |
2144 | |
2145 | /* If we have a load that turned out to be constant replace it | |
2146 | as we cannot propagate into all uses in all cases. */ | |
2147 | if (gimple_assign_single_p (stmt) | |
2148 | && TREE_CODE (lhs) == SSA_NAME | |
84d77ca6 | 2149 | && (val = get_constant_value (lhs))) |
5c95f07b | 2150 | { |
84d77ca6 | 2151 | tree rhs = unshare_expr (val); |
5c95f07b | 2152 | if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs))) |
70f34814 | 2153 | rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs); |
5c95f07b RG |
2154 | gimple_assign_set_rhs_from_tree (gsi, rhs); |
2155 | return true; | |
2156 | } | |
2157 | ||
2158 | return false; | |
2159 | } | |
2160 | ||
830bc550 RG |
2161 | default: |
2162 | return false; | |
2163 | } | |
f61e18ec RG |
2164 | } |
2165 | ||
750628d8 | 2166 | /* Visit the assignment statement STMT. Set the value of its LHS to the |
0bca51f0 DN |
2167 | value computed by the RHS and store LHS in *OUTPUT_P. If STMT |
2168 | creates virtual definitions, set the value of each new name to that | |
726a989a RB |
2169 | of the RHS (if we can derive a constant out of the RHS). |
2170 | Value-returning call statements also perform an assignment, and | |
2171 | are handled here. */ | |
6de9cd9a | 2172 | |
750628d8 | 2173 | static enum ssa_prop_result |
726a989a | 2174 | visit_assignment (gimple stmt, tree *output_p) |
6de9cd9a | 2175 | { |
0bca51f0 | 2176 | prop_value_t val; |
0bca51f0 | 2177 | enum ssa_prop_result retval; |
6de9cd9a | 2178 | |
726a989a | 2179 | tree lhs = gimple_get_lhs (stmt); |
6de9cd9a | 2180 | |
726a989a RB |
2181 | gcc_assert (gimple_code (stmt) != GIMPLE_CALL |
2182 | || gimple_call_lhs (stmt) != NULL_TREE); | |
2183 | ||
84d77ca6 RG |
2184 | if (gimple_assign_single_p (stmt) |
2185 | && gimple_assign_rhs_code (stmt) == SSA_NAME) | |
2186 | /* For a simple copy operation, we copy the lattice values. */ | |
2187 | val = *get_value (gimple_assign_rhs1 (stmt)); | |
750628d8 | 2188 | else |
726a989a RB |
2189 | /* Evaluate the statement, which could be |
2190 | either a GIMPLE_ASSIGN or a GIMPLE_CALL. */ | |
87e1e42b | 2191 | val = evaluate_stmt (stmt); |
6de9cd9a | 2192 | |
0bca51f0 | 2193 | retval = SSA_PROP_NOT_INTERESTING; |
6de9cd9a | 2194 | |
750628d8 | 2195 | /* Set the lattice value of the statement's output. */ |
0bca51f0 | 2196 | if (TREE_CODE (lhs) == SSA_NAME) |
6de9cd9a | 2197 | { |
0bca51f0 DN |
2198 | /* If STMT is an assignment to an SSA_NAME, we only have one |
2199 | value to set. */ | |
2200 | if (set_lattice_value (lhs, val)) | |
2201 | { | |
2202 | *output_p = lhs; | |
2203 | if (val.lattice_val == VARYING) | |
2204 | retval = SSA_PROP_VARYING; | |
2205 | else | |
2206 | retval = SSA_PROP_INTERESTING; | |
2207 | } | |
6de9cd9a | 2208 | } |
0bca51f0 DN |
2209 | |
2210 | return retval; | |
6de9cd9a DN |
2211 | } |
2212 | ||
6de9cd9a | 2213 | |
750628d8 DN |
2214 | /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING |
2215 | if it can determine which edge will be taken. Otherwise, return | |
2216 | SSA_PROP_VARYING. */ | |
2217 | ||
2218 | static enum ssa_prop_result | |
726a989a | 2219 | visit_cond_stmt (gimple stmt, edge *taken_edge_p) |
6de9cd9a | 2220 | { |
0bca51f0 | 2221 | prop_value_t val; |
750628d8 DN |
2222 | basic_block block; |
2223 | ||
726a989a | 2224 | block = gimple_bb (stmt); |
750628d8 | 2225 | val = evaluate_stmt (stmt); |
0b4b14ac RG |
2226 | if (val.lattice_val != CONSTANT |
2227 | || !double_int_zero_p (val.mask)) | |
2228 | return SSA_PROP_VARYING; | |
750628d8 DN |
2229 | |
2230 | /* Find which edge out of the conditional block will be taken and add it | |
2231 | to the worklist. If no single edge can be determined statically, | |
2232 | return SSA_PROP_VARYING to feed all the outgoing edges to the | |
2233 | propagation engine. */ | |
0b4b14ac | 2234 | *taken_edge_p = find_taken_edge (block, val.value); |
750628d8 DN |
2235 | if (*taken_edge_p) |
2236 | return SSA_PROP_INTERESTING; | |
2237 | else | |
2238 | return SSA_PROP_VARYING; | |
6de9cd9a DN |
2239 | } |
2240 | ||
6de9cd9a | 2241 | |
750628d8 DN |
2242 | /* Evaluate statement STMT. If the statement produces an output value and |
2243 | its evaluation changes the lattice value of its output, return | |
2244 | SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the | |
2245 | output value. | |
b8698a0f | 2246 | |
750628d8 DN |
2247 | If STMT is a conditional branch and we can determine its truth |
2248 | value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying | |
2249 | value, return SSA_PROP_VARYING. */ | |
6de9cd9a | 2250 | |
750628d8 | 2251 | static enum ssa_prop_result |
726a989a | 2252 | ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p) |
750628d8 | 2253 | { |
750628d8 DN |
2254 | tree def; |
2255 | ssa_op_iter iter; | |
6de9cd9a | 2256 | |
750628d8 | 2257 | if (dump_file && (dump_flags & TDF_DETAILS)) |
6de9cd9a | 2258 | { |
0bca51f0 | 2259 | fprintf (dump_file, "\nVisiting statement:\n"); |
726a989a | 2260 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
6de9cd9a | 2261 | } |
6de9cd9a | 2262 | |
726a989a | 2263 | switch (gimple_code (stmt)) |
6de9cd9a | 2264 | { |
726a989a RB |
2265 | case GIMPLE_ASSIGN: |
2266 | /* If the statement is an assignment that produces a single | |
2267 | output value, evaluate its RHS to see if the lattice value of | |
2268 | its output has changed. */ | |
2269 | return visit_assignment (stmt, output_p); | |
2270 | ||
2271 | case GIMPLE_CALL: | |
2272 | /* A value-returning call also performs an assignment. */ | |
2273 | if (gimple_call_lhs (stmt) != NULL_TREE) | |
2274 | return visit_assignment (stmt, output_p); | |
2275 | break; | |
2276 | ||
2277 | case GIMPLE_COND: | |
2278 | case GIMPLE_SWITCH: | |
2279 | /* If STMT is a conditional branch, see if we can determine | |
2280 | which branch will be taken. */ | |
2281 | /* FIXME. It appears that we should be able to optimize | |
2282 | computed GOTOs here as well. */ | |
2283 | return visit_cond_stmt (stmt, taken_edge_p); | |
2284 | ||
2285 | default: | |
2286 | break; | |
6de9cd9a | 2287 | } |
6de9cd9a | 2288 | |
750628d8 DN |
2289 | /* Any other kind of statement is not interesting for constant |
2290 | propagation and, therefore, not worth simulating. */ | |
750628d8 DN |
2291 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2292 | fprintf (dump_file, "No interesting values produced. Marked VARYING.\n"); | |
6de9cd9a | 2293 | |
750628d8 DN |
2294 | /* Definitions made by statements other than assignments to |
2295 | SSA_NAMEs represent unknown modifications to their outputs. | |
2296 | Mark them VARYING. */ | |
0bca51f0 DN |
2297 | FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS) |
2298 | { | |
0b4b14ac | 2299 | prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } }; |
0bca51f0 DN |
2300 | set_lattice_value (def, v); |
2301 | } | |
6de9cd9a | 2302 | |
750628d8 DN |
2303 | return SSA_PROP_VARYING; |
2304 | } | |
6de9cd9a | 2305 | |
6de9cd9a | 2306 | |
0bca51f0 | 2307 | /* Main entry point for SSA Conditional Constant Propagation. */ |
750628d8 | 2308 | |
3253eafb | 2309 | static unsigned int |
dce2b2f6 | 2310 | do_ssa_ccp (void) |
750628d8 DN |
2311 | { |
2312 | ccp_initialize (); | |
2313 | ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node); | |
3253eafb | 2314 | if (ccp_finalize ()) |
706ca88e | 2315 | return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals); |
3253eafb JH |
2316 | else |
2317 | return 0; | |
6de9cd9a DN |
2318 | } |
2319 | ||
173b818d BB |
2320 | |
2321 | static bool | |
750628d8 | 2322 | gate_ccp (void) |
173b818d | 2323 | { |
750628d8 | 2324 | return flag_tree_ccp != 0; |
173b818d BB |
2325 | } |
2326 | ||
6de9cd9a | 2327 | |
b8698a0f | 2328 | struct gimple_opt_pass pass_ccp = |
750628d8 | 2329 | { |
8ddbbcae JH |
2330 | { |
2331 | GIMPLE_PASS, | |
750628d8 DN |
2332 | "ccp", /* name */ |
2333 | gate_ccp, /* gate */ | |
0bca51f0 | 2334 | do_ssa_ccp, /* execute */ |
750628d8 DN |
2335 | NULL, /* sub */ |
2336 | NULL, /* next */ | |
2337 | 0, /* static_pass_number */ | |
2338 | TV_TREE_CCP, /* tv_id */ | |
7faade0f | 2339 | PROP_cfg | PROP_ssa, /* properties_required */ |
750628d8 | 2340 | 0, /* properties_provided */ |
ae07b463 | 2341 | 0, /* properties_destroyed */ |
750628d8 | 2342 | 0, /* todo_flags_start */ |
3253eafb | 2343 | TODO_dump_func | TODO_verify_ssa |
8ddbbcae JH |
2344 | | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */ |
2345 | } | |
750628d8 | 2346 | }; |
6de9cd9a | 2347 | |
6de9cd9a | 2348 | |
726a989a | 2349 | |
cb8e078d JJ |
2350 | /* Try to optimize out __builtin_stack_restore. Optimize it out |
2351 | if there is another __builtin_stack_restore in the same basic | |
2352 | block and no calls or ASM_EXPRs are in between, or if this block's | |
2353 | only outgoing edge is to EXIT_BLOCK and there are no calls or | |
2354 | ASM_EXPRs after this __builtin_stack_restore. */ | |
2355 | ||
2356 | static tree | |
726a989a | 2357 | optimize_stack_restore (gimple_stmt_iterator i) |
cb8e078d | 2358 | { |
ff9d1adc RH |
2359 | tree callee; |
2360 | gimple stmt; | |
726a989a RB |
2361 | |
2362 | basic_block bb = gsi_bb (i); | |
2363 | gimple call = gsi_stmt (i); | |
cb8e078d | 2364 | |
726a989a RB |
2365 | if (gimple_code (call) != GIMPLE_CALL |
2366 | || gimple_call_num_args (call) != 1 | |
2367 | || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME | |
2368 | || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0)))) | |
cb8e078d JJ |
2369 | return NULL_TREE; |
2370 | ||
726a989a | 2371 | for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i)) |
cb8e078d | 2372 | { |
726a989a RB |
2373 | stmt = gsi_stmt (i); |
2374 | if (gimple_code (stmt) == GIMPLE_ASM) | |
cb8e078d | 2375 | return NULL_TREE; |
726a989a | 2376 | if (gimple_code (stmt) != GIMPLE_CALL) |
cb8e078d JJ |
2377 | continue; |
2378 | ||
726a989a | 2379 | callee = gimple_call_fndecl (stmt); |
12f9ddbc RG |
2380 | if (!callee |
2381 | || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL | |
2382 | /* All regular builtins are ok, just obviously not alloca. */ | |
2383 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA) | |
cb8e078d JJ |
2384 | return NULL_TREE; |
2385 | ||
2386 | if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE) | |
ff9d1adc | 2387 | goto second_stack_restore; |
cb8e078d JJ |
2388 | } |
2389 | ||
ff9d1adc | 2390 | if (!gsi_end_p (i)) |
cb8e078d JJ |
2391 | return NULL_TREE; |
2392 | ||
ff9d1adc RH |
2393 | /* Allow one successor of the exit block, or zero successors. */ |
2394 | switch (EDGE_COUNT (bb->succs)) | |
2395 | { | |
2396 | case 0: | |
2397 | break; | |
2398 | case 1: | |
2399 | if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR) | |
2400 | return NULL_TREE; | |
2401 | break; | |
2402 | default: | |
2403 | return NULL_TREE; | |
2404 | } | |
2405 | second_stack_restore: | |
cb8e078d | 2406 | |
ff9d1adc RH |
2407 | /* If there's exactly one use, then zap the call to __builtin_stack_save. |
2408 | If there are multiple uses, then the last one should remove the call. | |
2409 | In any case, whether the call to __builtin_stack_save can be removed | |
2410 | or not is irrelevant to removing the call to __builtin_stack_restore. */ | |
2411 | if (has_single_use (gimple_call_arg (call, 0))) | |
2412 | { | |
2413 | gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0)); | |
2414 | if (is_gimple_call (stack_save)) | |
2415 | { | |
2416 | callee = gimple_call_fndecl (stack_save); | |
2417 | if (callee | |
2418 | && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL | |
2419 | && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE) | |
2420 | { | |
2421 | gimple_stmt_iterator stack_save_gsi; | |
2422 | tree rhs; | |
cb8e078d | 2423 | |
ff9d1adc RH |
2424 | stack_save_gsi = gsi_for_stmt (stack_save); |
2425 | rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0); | |
2426 | update_call_from_tree (&stack_save_gsi, rhs); | |
2427 | } | |
2428 | } | |
2429 | } | |
cb8e078d | 2430 | |
726a989a | 2431 | /* No effect, so the statement will be deleted. */ |
cb8e078d JJ |
2432 | return integer_zero_node; |
2433 | } | |
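/* As a sketch of the pattern handled above (SSA names illustrative only):

     p_1 = __builtin_stack_save ();
     ...				    <- no calls or asm statements
     __builtin_stack_restore (p_1);	    <- redundant, removed
     __builtin_stack_restore (p_2);

   The first restore is dead because another restore follows in the same
   block with nothing in between that could depend on the restored stack
   pointer; the feeding __builtin_stack_save is zapped as well when p_1
   has no other uses.  */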
726a989a | 2434 | |
d7bd8aeb JJ |
2435 | /* If the va_list type is a simple pointer and nothing special is needed, | |
2436 | optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0), | |
2437 | turn __builtin_va_end (&ap) into a no-op, and __builtin_va_copy into a | |
2438 | simple pointer assignment. */ | |
2439 | ||
2440 | static tree | |
726a989a | 2441 | optimize_stdarg_builtin (gimple call) |
d7bd8aeb | 2442 | { |
35cbb299 | 2443 | tree callee, lhs, rhs, cfun_va_list; |
d7bd8aeb | 2444 | bool va_list_simple_ptr; |
db3927fb | 2445 | location_t loc = gimple_location (call); |
d7bd8aeb | 2446 | |
726a989a | 2447 | if (gimple_code (call) != GIMPLE_CALL) |
d7bd8aeb JJ |
2448 | return NULL_TREE; |
2449 | ||
726a989a | 2450 | callee = gimple_call_fndecl (call); |
35cbb299 KT |
2451 | |
2452 | cfun_va_list = targetm.fn_abi_va_list (callee); | |
2453 | va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list) | |
2454 | && (TREE_TYPE (cfun_va_list) == void_type_node | |
2455 | || TREE_TYPE (cfun_va_list) == char_type_node); | |
2456 | ||
d7bd8aeb JJ |
2457 | switch (DECL_FUNCTION_CODE (callee)) |
2458 | { | |
2459 | case BUILT_IN_VA_START: | |
2460 | if (!va_list_simple_ptr | |
2461 | || targetm.expand_builtin_va_start != NULL | |
726a989a | 2462 | || built_in_decls[BUILT_IN_NEXT_ARG] == NULL) |
d7bd8aeb JJ |
2463 | return NULL_TREE; |
2464 | ||
726a989a | 2465 | if (gimple_call_num_args (call) != 2) |
d7bd8aeb JJ |
2466 | return NULL_TREE; |
2467 | ||
726a989a | 2468 | lhs = gimple_call_arg (call, 0); |
d7bd8aeb JJ |
2469 | if (!POINTER_TYPE_P (TREE_TYPE (lhs)) |
2470 | || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs))) | |
35cbb299 | 2471 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
d7bd8aeb | 2472 | return NULL_TREE; |
b8698a0f | 2473 | |
db3927fb AH |
2474 | lhs = build_fold_indirect_ref_loc (loc, lhs); |
2475 | rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG], | |
726a989a | 2476 | 1, integer_zero_node); |
db3927fb | 2477 | rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs); |
d7bd8aeb JJ |
2478 | return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs); |
2479 | ||
2480 | case BUILT_IN_VA_COPY: | |
2481 | if (!va_list_simple_ptr) | |
2482 | return NULL_TREE; | |
2483 | ||
726a989a | 2484 | if (gimple_call_num_args (call) != 2) |
d7bd8aeb JJ |
2485 | return NULL_TREE; |
2486 | ||
726a989a | 2487 | lhs = gimple_call_arg (call, 0); |
d7bd8aeb JJ |
2488 | if (!POINTER_TYPE_P (TREE_TYPE (lhs)) |
2489 | || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs))) | |
35cbb299 | 2490 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
d7bd8aeb JJ |
2491 | return NULL_TREE; |
2492 | ||
db3927fb | 2493 | lhs = build_fold_indirect_ref_loc (loc, lhs); |
726a989a | 2494 | rhs = gimple_call_arg (call, 1); |
d7bd8aeb | 2495 | if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs)) |
35cbb299 | 2496 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
d7bd8aeb JJ |
2497 | return NULL_TREE; |
2498 | ||
db3927fb | 2499 | rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs); |
d7bd8aeb JJ |
2500 | return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs); |
2501 | ||
2502 | case BUILT_IN_VA_END: | |
726a989a | 2503 | /* No effect, so the statement will be deleted. */ |
d7bd8aeb JJ |
2504 | return integer_zero_node; |
2505 | ||
2506 | default: | |
2507 | gcc_unreachable (); | |
2508 | } | |
2509 | } | |
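/* For a target whose va_list is a plain pointer, the effect is roughly:

     __builtin_va_start (&ap, 0)	->  ap = __builtin_next_arg (0)
     __builtin_va_copy (&dst, src)	->  dst = src
     __builtin_va_end (&ap)		->  removed

   with the needed pointer conversions folded in via fold_convert_loc.  */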
726a989a | 2510 | |
6de9cd9a DN |
2511 | /* A simple pass that attempts to fold all builtin functions. This pass |
2512 | is run after we've propagated as many constants as we can. */ | |
2513 | ||
c2924966 | 2514 | static unsigned int |
6de9cd9a DN |
2515 | execute_fold_all_builtins (void) |
2516 | { | |
a7d6ba24 | 2517 | bool cfg_changed = false; |
6de9cd9a | 2518 | basic_block bb; |
7b0e48fb | 2519 | unsigned int todoflags = 0; |
b8698a0f | 2520 | |
6de9cd9a DN |
2521 | FOR_EACH_BB (bb) |
2522 | { | |
726a989a RB |
2523 | gimple_stmt_iterator i; |
2524 | for (i = gsi_start_bb (bb); !gsi_end_p (i); ) | |
6de9cd9a | 2525 | { |
726a989a | 2526 | gimple stmt, old_stmt; |
6de9cd9a | 2527 | tree callee, result; |
10a0d495 | 2528 | enum built_in_function fcode; |
6de9cd9a | 2529 | |
726a989a RB |
2530 | stmt = gsi_stmt (i); |
2531 | ||
2532 | if (gimple_code (stmt) != GIMPLE_CALL) | |
10a0d495 | 2533 | { |
726a989a | 2534 | gsi_next (&i); |
10a0d495 JJ |
2535 | continue; |
2536 | } | |
726a989a | 2537 | callee = gimple_call_fndecl (stmt); |
6de9cd9a | 2538 | if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL) |
10a0d495 | 2539 | { |
726a989a | 2540 | gsi_next (&i); |
10a0d495 JJ |
2541 | continue; |
2542 | } | |
2543 | fcode = DECL_FUNCTION_CODE (callee); | |
6de9cd9a | 2544 | |
cbdd87d4 | 2545 | result = gimple_fold_builtin (stmt); |
53a8f709 UB |
2546 | |
2547 | if (result) | |
726a989a | 2548 | gimple_remove_stmt_histograms (cfun, stmt); |
53a8f709 | 2549 | |
6de9cd9a DN |
2550 | if (!result) |
2551 | switch (DECL_FUNCTION_CODE (callee)) | |
2552 | { | |
2553 | case BUILT_IN_CONSTANT_P: | |
2554 | /* Resolve __builtin_constant_p. If it hasn't been | |
2555 | folded to integer_one_node by now, it's fairly | |
2556 | certain that the value simply isn't constant. */ | |
726a989a | 2557 | result = integer_zero_node; |
6de9cd9a DN |
2558 | break; |
2559 | ||
cb8e078d | 2560 | case BUILT_IN_STACK_RESTORE: |
726a989a | 2561 | result = optimize_stack_restore (i); |
d7bd8aeb JJ |
2562 | if (result) |
2563 | break; | |
726a989a | 2564 | gsi_next (&i); |
d7bd8aeb JJ |
2565 | continue; |
2566 | ||
2567 | case BUILT_IN_VA_START: | |
2568 | case BUILT_IN_VA_END: | |
2569 | case BUILT_IN_VA_COPY: | |
2570 | /* These shouldn't be folded before pass_stdarg. */ | |
726a989a | 2571 | result = optimize_stdarg_builtin (stmt); |
cb8e078d JJ |
2572 | if (result) |
2573 | break; | |
2574 | /* FALLTHRU */ | |
2575 | ||
6de9cd9a | 2576 | default: |
726a989a | 2577 | gsi_next (&i); |
6de9cd9a DN |
2578 | continue; |
2579 | } | |
2580 | ||
2581 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2582 | { | |
2583 | fprintf (dump_file, "Simplified\n "); | |
726a989a | 2584 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
6de9cd9a DN |
2585 | } |
2586 | ||
726a989a | 2587 | old_stmt = stmt; |
726a989a | 2588 | if (!update_call_from_tree (&i, result)) |
4bad83f5 RG |
2589 | { |
2590 | gimplify_and_update_call_from_tree (&i, result); | |
2591 | todoflags |= TODO_update_address_taken; | |
2592 | } | |
cfaab3a9 | 2593 | |
726a989a | 2594 | stmt = gsi_stmt (i); |
cff4e50d | 2595 | update_stmt (stmt); |
cfaab3a9 | 2596 | |
726a989a RB |
2597 | if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt) |
2598 | && gimple_purge_dead_eh_edges (bb)) | |
a7d6ba24 | 2599 | cfg_changed = true; |
6de9cd9a DN |
2600 | |
2601 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2602 | { | |
2603 | fprintf (dump_file, "to\n "); | |
726a989a | 2604 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
6de9cd9a DN |
2605 | fprintf (dump_file, "\n"); |
2606 | } | |
10a0d495 JJ |
2607 | |
2608 | /* Retry the same statement if it changed into another | |
2609 | builtin, there might be new opportunities now. */ | |
726a989a | 2610 | if (gimple_code (stmt) != GIMPLE_CALL) |
10a0d495 | 2611 | { |
726a989a | 2612 | gsi_next (&i); |
10a0d495 JJ |
2613 | continue; |
2614 | } | |
726a989a | 2615 | callee = gimple_call_fndecl (stmt); |
10a0d495 | 2616 | if (!callee |
726a989a | 2617 | || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL |
10a0d495 | 2618 | || DECL_FUNCTION_CODE (callee) == fcode) |
726a989a | 2619 | gsi_next (&i); |
6de9cd9a DN |
2620 | } |
2621 | } | |
b8698a0f | 2622 | |
a7d6ba24 | 2623 | /* Delete unreachable blocks. */ |
7b0e48fb DB |
2624 | if (cfg_changed) |
2625 | todoflags |= TODO_cleanup_cfg; | |
b8698a0f | 2626 | |
7b0e48fb | 2627 | return todoflags; |
6de9cd9a DN |
2628 | } |
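/* For instance, a surviving call such as strlen on a constant string can
   be folded to its length by gimple_fold_builtin above,
   __builtin_constant_p calls that were not resolved earlier become 0, and
   redundant __builtin_stack_restore and stdarg builtins are cleaned up by
   the helpers above.  */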
2629 | ||
750628d8 | 2630 | |
b8698a0f | 2631 | struct gimple_opt_pass pass_fold_builtins = |
6de9cd9a | 2632 | { |
8ddbbcae JH |
2633 | { |
2634 | GIMPLE_PASS, | |
6de9cd9a DN |
2635 | "fab", /* name */ |
2636 | NULL, /* gate */ | |
2637 | execute_fold_all_builtins, /* execute */ | |
2638 | NULL, /* sub */ | |
2639 | NULL, /* next */ | |
2640 | 0, /* static_pass_number */ | |
7072a650 | 2641 | TV_NONE, /* tv_id */ |
7faade0f | 2642 | PROP_cfg | PROP_ssa, /* properties_required */ |
6de9cd9a DN |
2643 | 0, /* properties_provided */ |
2644 | 0, /* properties_destroyed */ | |
2645 | 0, /* todo_flags_start */ | |
b28b1600 JJ |
2646 | TODO_dump_func |
2647 | | TODO_verify_ssa | |
8ddbbcae JH |
2648 | | TODO_update_ssa /* todo_flags_finish */ |
2649 | } | |
6de9cd9a | 2650 | }; |