/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

     UNINITIALIZED  ->  the initial state of the value.  This value
                        is replaced with a correct initial value
                        the first time the value is used, so the
                        rest of the pass does not need to care about
                        it.  Using this value simplifies initialization
                        of the pass, and prevents us from needlessly
                        scanning statements that are never reached.

     UNDEFINED      ->  V_i is a local variable whose definition
                        has not been processed yet.  Therefore we
                        don't yet know if its value is a constant
                        or not.

     CONSTANT       ->  V_i has been found to hold a constant
                        value C.

     VARYING        ->  V_i cannot take a constant value, or if it
                        does, it is not possible to determine it
                        at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main shortcuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

            if (PRED)
              a_9 = 3;
            else
              a_10 = 100;
            a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "gimple-fold.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};
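
/* For example, a CONSTANT with value == 8 and mask == 3 satisfies
   X & ~3 == 8 and thus describes any of 8, 9, 10 or 11; a zero mask
   means the value is known exactly.  */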

typedef struct prop_value_d prop_value_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
          || double_int_zero_p (val.mask))
        {
          fprintf (outf, "%sCONSTANT ", prefix);
          print_generic_expr (outf, val.value, dump_flags);
        }
      else
        {
          double_int cval = double_int_and_not (tree_to_double_int (val.value),
                                                val.mask);
          fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
                   prefix, cval.high, cval.low);
          fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
                   val.mask.high, val.mask.low);
        }
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}


/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
         before being initialized.  If VAR is a local variable, we
         can assume initially that it is UNDEFINED, otherwise we must
         consider it VARYING.  */
      if (is_gimple_reg (sym)
          && TREE_CODE (sym) == VAR_DECL)
        val.lattice_val = UNDEFINED;
      else
        {
          val.lattice_val = VARYING;
          val.mask = double_int_minus_one;
        }
    }
  else if (is_gimple_assign (stmt)
           /* Value-returning GIMPLE_CALL statements assign to
              a variable, and are treated similarly to GIMPLE_ASSIGN.  */
           || (is_gimple_call (stmt)
               && gimple_call_lhs (stmt) != NULL_TREE)
           || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
          && DECL_P (gimple_assign_rhs1 (stmt))
          && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
        {
          val.lattice_val = CONSTANT;
          val.value = cst;
        }
      else
        /* Any other variable defined by an assignment or a PHI node
           is considered UNDEFINED.  */
        val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}
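
/* As an illustration of rule 1 above, given

     static const int n = 42;
     ...
     x_1 = n;

   the definition of x_1 defaults to CONSTANT 42 via
   get_symbol_constant_value, whereas an SSA name for an uninitialized
   local defaults to UNDEFINED and one defined by, say, an ASM
   statement defaults to VARYING.  */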


/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
        return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
          || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from &x to &x & ~3.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
             (double_int_and_not (tree_to_double_int (old_val.value),
                                  new_val.mask),
              double_int_and_not (tree_to_double_int (new_val.value),
                                  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
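
/* For instance, moving from CONSTANT 4 with mask 0 to CONSTANT 6 with
   mask 2 is a valid transition: the bits the new mask still claims
   constant agree, 4 & ~2 == 6 & ~2.  Moving from CONSTANT 4 to
   CONSTANT 5 with a zero mask is not.  */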

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
                             tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
                                     double_int_ior (old_val->mask, diff));
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
          && TREE_CODE (new_val.value) == INTEGER_CST
          && (TREE_CODE (old_val->value) != INTEGER_CST
              || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
         partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
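
/* Example of the mask widening above: if the lattice holds CONSTANT 4
   with mask 0 and a later visit computes CONSTANT 6, the values differ
   in bit 1 (4 ^ 6 == 2), so the stored value becomes CONSTANT with
   mask 2 and only the bits outside the mask remain known.  */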

static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
                               tree, double_int, double_int,
                               tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}

/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  prop_value_t val;
  HOST_WIDE_INT bitsize, bitpos;
  tree base, offset;
  enum machine_mode mode;
  int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  base = get_inner_reference (TREE_OPERAND (expr, 0),
                              &bitsize, &bitpos, &offset,
                              &mode, &align, &align, false);
  if (TREE_CODE (base) == MEM_REF)
    val = bit_value_binop (PLUS_EXPR, TREE_TYPE (expr),
                           TREE_OPERAND (base, 0), TREE_OPERAND (base, 1));
  else if (base
           /* ??? While function decls have DECL_ALIGN their addresses
              may encode extra information in the lower bits on some
              targets (PR47239).  Simply punt for function decls for now.  */
           && TREE_CODE (base) != FUNCTION_DECL
           && ((align = get_object_alignment (base, BIGGEST_ALIGNMENT))
               > BITS_PER_UNIT))
    {
      val.lattice_val = CONSTANT;
      /* We assume pointers are zero-extended.  */
      val.mask = double_int_and_not
                   (double_int_mask (TYPE_PRECISION (TREE_TYPE (expr))),
                    uhwi_to_double_int (align / BITS_PER_UNIT - 1));
      val.value = build_int_cst (TREE_TYPE (expr), 0);
    }
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  if (bitpos != 0)
    {
      double_int value, mask;
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
                         TREE_TYPE (expr), value_to_double_int (val), val.mask,
                         TREE_TYPE (expr),
                         shwi_to_double_int (bitpos / BITS_PER_UNIT),
                         double_int_zero);
      val.lattice_val = double_int_minus_one_p (mask) ? VARYING : CONSTANT;
      val.mask = mask;
      if (val.lattice_val == CONSTANT)
        val.value = double_int_to_tree (TREE_TYPE (expr), value);
      else
        val.value = NULL_TREE;
    }
  /* ??? We should handle i * 4 and more complex expressions from
     the offset, possibly by just expanding get_value_for_expr.  */
  if (offset != NULL_TREE)
    {
      double_int value, mask;
      prop_value_t oval = get_value_for_expr (offset, true);
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
                         TREE_TYPE (expr), value_to_double_int (val), val.mask,
                         TREE_TYPE (expr), value_to_double_int (oval),
                         oval.mask);
      val.mask = mask;
      if (double_int_minus_one_p (mask))
        {
          val.lattice_val = VARYING;
          val.value = NULL_TREE;
        }
      else
        {
          val.lattice_val = CONSTANT;
          val.value = double_int_to_tree (TREE_TYPE (expr), value);
        }
    }

  return val;
}
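
/* For example, for EXPR == &a where a has 16-byte alignment this
   computes CONSTANT 0 with the low four mask bits clear: the address
   is known to be a multiple of 16, so its four least significant bits
   are known zero while all higher bits stay unknown.  */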

/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
          && val.lattice_val == CONSTANT
          && TREE_CODE (val.value) == ADDR_EXPR)
        val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
           && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
        continue;
      if (is_gimple_min_invariant (op))
        has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call that does not return a value, or a direct call
     to something other than a builtin, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !DECL_BUILT_IN (fndecl)))
        return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying;

          /* If the statement is a control insn, we must make sure it
             is simulated at least once; failing to do so means that
             its outgoing edges will never get added.  */
          if (stmt_ends_bb_p (stmt))
            is_varying = false;
          else
            is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                set_value_varying (def);
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!is_gimple_reg (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}

/* Debug count support.  Reset the value of an SSA name to VARYING
   once the number of SSA names analyzed goes beyond the specified
   debug count.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
        {
          const_val[i].lattice_val = VARYING;
          const_val[i].mask = double_int_minus_one;
          const_val[i].value = NULL_TREE;
        }
    }
}


/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      struct ptr_info_def *pi;
      unsigned int tem, align;

      if (!name
          || !POINTER_TYPE_P (TREE_TYPE (name)))
        continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
          || TREE_CODE (val->value) != INTEGER_CST)
        continue;

      /* Trailing constant bits specify the alignment, trailing value
         bits the misalignment.  */
      tem = val->mask.low;
      align = (tem & -tem);
      if (align == 1)
        continue;

      pi = get_ptr_info (name);
      pi->align = align;
      pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1);
    }
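
  /* For instance, a pointer whose lattice value is 4 with mask ~7
     (the three low bits known) yields align == 8 and misalign == 4:
     the pointer is known to be 4 more than a multiple of 8.  */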

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
                                           ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

     any M UNDEFINED = any
     any M VARYING   = VARYING
     Ci  M Cj        = Ci       if (i == j)
     Ci  M Cj        = VARYING  if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any.  */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any.
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && TREE_CODE (val1->value) == INTEGER_CST
           && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci       if (i == j)
         Ci M Cj = VARYING  if (i != j)

         For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
         drop to varying.  */
      val1->mask
          = double_int_ior (double_int_ior (val1->mask,
                                            val2->mask),
                            double_int_xor (tree_to_double_int (val1->value),
                                            tree_to_double_int (val2->value)));
      if (double_int_minus_one_p (val1->mask))
        {
          val1->lattice_val = VARYING;
          val1->value = NULL_TREE;
        }
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci       if (i == j)
         Ci M Cj = VARYING  if (i != j)

         VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && (TREE_CODE (val1->value) == ADDR_EXPR
               || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
         alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
        *val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
        tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
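
/* For example, meeting the INTEGER_CSTs 5 (101b) and 7 (111b) with
   zero masks yields value 5 with mask 2: the bit in which they differ
   (5 ^ 7 == 2) becomes unknown while the remaining bits stay known.
   The result only degrades to VARYING once the combined mask has all
   bits set.  */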


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val = get_value_for_expr (arg, false);

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
        return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = valueize_op (gimple_cond_lhs (stmt));
        tree op1 = valueize_op (gimple_cond_rhs (stmt));
        enum tree_code code = gimple_cond_code (stmt);
        return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        /* Return the constant switch index.  */
        return valueize_op (gimple_switch_index (stmt));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);

    default:
      gcc_unreachable ();
    }
}

/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
                  double_int *val, double_int *mask,
                  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
        double_int temv, temm;
        /* Return ~rval + 1.  */
        bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
        bit_value_binop_1 (PLUS_EXPR, type, val, mask,
                           type, temv, temm,
                           type, double_int_one, double_int_zero);
        break;
      }

    CASE_CONVERT:
      {
        bool uns;

        /* First extend mask and value according to the original type.  */
        uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
               ? 0 : TYPE_UNSIGNED (rtype));
        *mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
        *val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

        /* Then extend mask and value according to the target type.  */
        uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
               ? 0 : TYPE_UNSIGNED (type));
        *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
        *val = double_int_ext (*val, TYPE_PRECISION (type), uns);
        break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}

/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
                   double_int *val, double_int *mask,
                   tree r1type, double_int r1val, double_int r1mask,
                   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = (TREE_CODE (type) == INTEGER_TYPE
              && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
         set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)).  */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
                              double_int_and (double_int_ior (r1val, r1mask),
                                              double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
         set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
                (double_int_ior (r1mask, r2mask),
                 double_int_ior (double_int_and_not (r1val, r1mask),
                                 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
        {
          HOST_WIDE_INT shift = r2val.low;
          if (code == RROTATE_EXPR)
            shift = -shift;
          *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
          *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
        }
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
         its sign.  That way we can tell that (x << (y | 8)) & 255
         is zero.  */
      if (double_int_zero_p (r2mask))
        {
          HOST_WIDE_INT shift = r2val.low;
          if (code == RSHIFT_EXPR)
            shift = -shift;
          /* We need to know if we are doing a left or a right shift
             to properly shift in zeros for left shift and unsigned
             right shifts and the sign bit for signed right shifts.
             For signed right shifts we shift in varying in case
             the sign bit was varying.  */
          if (shift > 0)
            {
              *mask = double_int_lshift (r1mask, shift,
                                         TYPE_PRECISION (type), false);
              *val = double_int_lshift (r1val, shift,
                                        TYPE_PRECISION (type), false);
            }
          else if (shift < 0)
            {
              /* ??? We can have sizetype related inconsistencies in
                 the IL.  */
              if ((TREE_CODE (r1type) == INTEGER_TYPE
                   && (TYPE_IS_SIZETYPE (r1type)
                       ? 0 : TYPE_UNSIGNED (r1type))) != uns)
                break;

              shift = -shift;
              *mask = double_int_rshift (r1mask, shift,
                                         TYPE_PRECISION (type), !uns);
              *val = double_int_rshift (r1val, shift,
                                        TYPE_PRECISION (type), !uns);
            }
          else
            {
              *mask = r1mask;
              *val = r1val;
            }
        }
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
        double_int lo, hi;
        /* Do the addition with unknown bits set to zero, to give carry-ins of
           zero wherever possible.  */
        lo = double_int_add (double_int_and_not (r1val, r1mask),
                             double_int_and_not (r2val, r2mask));
        lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
        /* Do the addition with unknown bits set to one, to give carry-ins of
           one wherever possible.  */
        hi = double_int_add (double_int_ior (r1val, r1mask),
                             double_int_ior (r2val, r2mask));
        hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
        /* Each bit in the result is known if (a) the corresponding bits in
           both inputs are known, and (b) the carry-in to that bit position
           is known.  We can check condition (b) by seeing if we got the same
           result with minimised carries as with maximised carries.  */
        *mask = double_int_ior (double_int_ior (r1mask, r2mask),
                                double_int_xor (lo, hi));
        *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
        /* It shouldn't matter whether we choose lo or hi here.  */
        *val = lo;
        break;
      }
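
      /* Example of the carry check above: adding r1 = 4 (mask 0) to
         r2 = 0 (mask 1) gives lo = 4 + 0 = 4 and hi = 4 + 1 = 5.
         lo ^ hi == 1, so only bit 0 is unknown and the sum is value 4
         with mask 1, i.e. either 4 or 5.  Had the unknown bit been
         able to generate a carry, the xor would also mark the
         carried-into positions as unknown.  */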

    case MINUS_EXPR:
      {
        double_int temv, temm;
        bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
                          r2type, r2val, r2mask);
        bit_value_binop_1 (PLUS_EXPR, type, val, mask,
                           r1type, r1val, r1mask,
                           r2type, temv, temm);
        break;
      }

    case MULT_EXPR:
      {
        /* Just track trailing zeros in both operands and transfer
           them to the other.  */
        int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
        int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
        if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
          {
            *mask = double_int_zero;
            *val = double_int_zero;
          }
        else if (r1tz + r2tz > 0)
          {
            *mask = double_int_not (double_int_mask (r1tz + r2tz));
            *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
            *val = double_int_zero;
          }
        break;
      }
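
      /* E.g. a multiple of 4 (two trailing zero bits) times a multiple
         of 2 (one trailing zero bit) is known to be a multiple of 8:
         the three low bits of the result are known zero and everything
         above them stays unknown.  */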

    case EQ_EXPR:
    case NE_EXPR:
      {
        double_int m = double_int_ior (r1mask, r2mask);
        if (!double_int_equal_p (double_int_and_not (r1val, m),
                                 double_int_and_not (r2val, m)))
          {
            *mask = double_int_zero;
            *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
          }
        else
          {
            /* We know the result of a comparison is always one or zero.  */
            *mask = double_int_one;
            *val = double_int_zero;
          }
        break;
      }
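
      /* That is, if the operands disagree in any bit position known on
         both sides, the comparison is decided: e.g. x == y with
         x = 4 (mask 1) and y = 2 (mask 1) is known false, since the
         values already differ outside the masked bit 0.  */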

    case GE_EXPR:
    case GT_EXPR:
      {
        double_int tem = r1val;
        r1val = r2val;
        r2val = tem;
        tem = r1mask;
        r1mask = r2mask;
        r2mask = tem;
        code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
        int minmax, maxmin;
        /* If the most significant bits are not known we know nothing.  */
        if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
          break;

        /* For comparisons the signedness is in the comparison operands.  */
        uns = (TREE_CODE (r1type) == INTEGER_TYPE
               && TYPE_IS_SIZETYPE (r1type) ? 0 : TYPE_UNSIGNED (r1type));
        /* ??? We can have sizetype related inconsistencies in the IL.  */
        if ((TREE_CODE (r2type) == INTEGER_TYPE
             && TYPE_IS_SIZETYPE (r2type) ? 0 : TYPE_UNSIGNED (r2type)) != uns)
          break;

        /* If we know the most significant bits we know the value
           ranges, by treating varying bits as zero or one.  Do a
           cross comparison of the max/min pairs.  */
        maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
                                 double_int_and_not (r2val, r2mask), uns);
        minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
                                 double_int_ior (r2val, r2mask), uns);
        if (maxmin < 0)  /* r1 is less than r2.  */
          {
            *mask = double_int_zero;
            *val = double_int_one;
          }
        else if (minmax > 0)  /* r1 is not less or equal to r2.  */
          {
            *mask = double_int_zero;
            *val = double_int_zero;
          }
        else if (maxmin == minmax)  /* r1 and r2 are equal.  */
          {
            /* This probably should never happen as we'd have
               folded the thing during fully constant value folding.  */
            *mask = double_int_zero;
            *val = (code == LE_EXPR ? double_int_one : double_int_zero);
          }
        else
          {
            /* We know the result of a comparison is always one or zero.  */
            *mask = double_int_one;
            *val = double_int_zero;
          }
        break;
      }

    default:;
    }
}

/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((rval.lattice_val == CONSTANT
               && TREE_CODE (rval.value) == INTEGER_CST)
              || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
                    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

1444 | /* Return the propagation value when applying the operation CODE to | |
1445 | the values RHS1 and RHS2 yielding type TYPE. */ | |
1446 | ||
1447 | static prop_value_t | |
1448 | bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2) | |
1449 | { | |
1450 | prop_value_t r1val = get_value_for_expr (rhs1, true); | |
1451 | prop_value_t r2val = get_value_for_expr (rhs2, true); | |
1452 | double_int value, mask; | |
1453 | prop_value_t val; | |
1454 | gcc_assert ((r1val.lattice_val == CONSTANT | |
1455 | && TREE_CODE (r1val.value) == INTEGER_CST) | |
1456 | || double_int_minus_one_p (r1val.mask)); | |
1457 | gcc_assert ((r2val.lattice_val == CONSTANT | |
1458 | && TREE_CODE (r2val.value) == INTEGER_CST) | |
1459 | || double_int_minus_one_p (r2val.mask)); | |
1460 | bit_value_binop_1 (code, type, &value, &mask, | |
1461 | TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask, | |
1462 | TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask); | |
1463 | if (!double_int_minus_one_p (mask)) | |
1464 | { | |
1465 | val.lattice_val = CONSTANT; | |
1466 | val.mask = mask; | |
1467 | /* ??? Delay building trees here. */ | |
1468 | val.value = double_int_to_tree (type, value); | |
1469 | } | |
1470 | else | |
1471 | { | |
1472 | val.lattice_val = VARYING; | |
1473 | val.value = NULL_TREE; | |
1474 | val.mask = double_int_minus_one; | |
1475 | } | |
1476 | return val; | |
1477 | } | |
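/* A companion sketch for the binary case, with the same invented
   names: for BIT_AND_EXPR a result bit is known once it is known zero
   in either operand or known one in both.

     static struct bits
     bits_and (struct bits a, struct bits b)
     {
       struct bits r;
       r.val = a.val & b.val;
       r.mask = (a.mask | b.mask)     (unknown in some operand, and)
		& (a.val | a.mask)    (a not known zero there, and)
		& (b.val | b.mask);   (b not known zero there)
       return r;
     }

   This mirrors the BIT_AND_EXPR case of bit_value_binop_1.  */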
1478 | ||
75a70cf9 | 1479 | /* Evaluate statement STMT. |
1480 | Valid only for assignments, calls, conditionals, and switches. */ | |
4ee9c684 | 1481 | |
88dbf20f | 1482 | static prop_value_t |
75a70cf9 | 1483 | evaluate_stmt (gimple stmt) |
4ee9c684 | 1484 | { |
88dbf20f | 1485 | prop_value_t val; |
4f61cce6 | 1486 | tree simplified = NULL_TREE; |
88dbf20f | 1487 | ccp_lattice_t likelyvalue = likely_value (stmt); |
b7e55469 | 1488 | bool is_constant = false; |
88dbf20f | 1489 | |
b7e55469 | 1490 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1491 | { | |
1492 | fprintf (dump_file, "which is likely "); | |
1493 | switch (likelyvalue) | |
1494 | { | |
1495 | case CONSTANT: | |
1496 | fprintf (dump_file, "CONSTANT"); | |
1497 | break; | |
1498 | case UNDEFINED: | |
1499 | fprintf (dump_file, "UNDEFINED"); | |
1500 | break; | |
1501 | case VARYING: | |
1502 | fprintf (dump_file, "VARYING"); | |
1503 | break; | |
1504 | default:; | |
1505 | } | |
1506 | fprintf (dump_file, "\n"); | |
1507 | } | |
add6ee5e | 1508 | |
4ee9c684 | 1509 | /* If the statement is likely to have a CONSTANT result, then try |
1510 | to fold the statement to determine the constant value. */ | |
75a70cf9 | 1511 | /* FIXME. This is the only place that we call ccp_fold. | 
1512 | Since likely_value never returns CONSTANT for calls, we will | 
1513 | not attempt to fold them, including builtins that might be profitable to fold. */ | 
4ee9c684 | 1514 | if (likelyvalue == CONSTANT) |
b7e55469 | 1515 | { |
1516 | fold_defer_overflow_warnings (); | |
1517 | simplified = ccp_fold (stmt); | |
1518 | is_constant = simplified && is_gimple_min_invariant (simplified); | |
1519 | fold_undefer_overflow_warnings (is_constant, stmt, 0); | |
1520 | if (is_constant) | |
1521 | { | |
1522 | /* The statement produced a constant value. */ | |
1523 | val.lattice_val = CONSTANT; | |
1524 | val.value = simplified; | |
1525 | val.mask = double_int_zero; | |
1526 | } | |
1527 | } | |
4ee9c684 | 1528 | /* If the statement is likely to have a VARYING result, then do not |
1529 | bother folding the statement. */ | |
04236c3a | 1530 | else if (likelyvalue == VARYING) |
75a70cf9 | 1531 | { |
590c3166 | 1532 | enum gimple_code code = gimple_code (stmt); |
75a70cf9 | 1533 | if (code == GIMPLE_ASSIGN) |
1534 | { | |
1535 | enum tree_code subcode = gimple_assign_rhs_code (stmt); | |
48e1416a | 1536 | |
75a70cf9 | 1537 | /* Other cases cannot satisfy is_gimple_min_invariant |
1538 | without folding. */ | |
1539 | if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS) | |
1540 | simplified = gimple_assign_rhs1 (stmt); | |
1541 | } | |
1542 | else if (code == GIMPLE_SWITCH) | |
1543 | simplified = gimple_switch_index (stmt); | |
1544 | else | |
a65c4d64 | 1545 | /* These cannot satisfy is_gimple_min_invariant without folding. */ |
1546 | gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND); | |
b7e55469 | 1547 | is_constant = simplified && is_gimple_min_invariant (simplified); |
1548 | if (is_constant) | |
1549 | { | |
1550 | /* The statement produced a constant value. */ | |
1551 | val.lattice_val = CONSTANT; | |
1552 | val.value = simplified; | |
1553 | val.mask = double_int_zero; | |
1554 | } | |
75a70cf9 | 1555 | } |
4ee9c684 | 1556 | |
b7e55469 | 1557 | /* Resort to simplification for bitwise tracking. */ |
1558 | if (flag_tree_bit_ccp | |
1559 | && likelyvalue == CONSTANT | |
1560 | && !is_constant) | |
912f109f | 1561 | { |
b7e55469 | 1562 | enum gimple_code code = gimple_code (stmt); |
153c3b50 | 1563 | tree fndecl; |
b7e55469 | 1564 | val.lattice_val = VARYING; |
1565 | val.value = NULL_TREE; | |
1566 | val.mask = double_int_minus_one; | |
1567 | if (code == GIMPLE_ASSIGN) | |
912f109f | 1568 | { |
b7e55469 | 1569 | enum tree_code subcode = gimple_assign_rhs_code (stmt); |
1570 | tree rhs1 = gimple_assign_rhs1 (stmt); | |
1571 | switch (get_gimple_rhs_class (subcode)) | |
1572 | { | |
1573 | case GIMPLE_SINGLE_RHS: | |
1574 | if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
1575 | || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
1576 | val = get_value_for_expr (rhs1, true); | |
1577 | break; | |
1578 | ||
1579 | case GIMPLE_UNARY_RHS: | |
1580 | if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
1581 | || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
1582 | && (INTEGRAL_TYPE_P (gimple_expr_type (stmt)) | |
1583 | || POINTER_TYPE_P (gimple_expr_type (stmt)))) | |
1584 | val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1); | |
1585 | break; | |
1586 | ||
1587 | case GIMPLE_BINARY_RHS: | |
1588 | if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
1589 | || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
1590 | { | |
e47d81e0 | 1591 | tree lhs = gimple_assign_lhs (stmt); |
b7e55469 | 1592 | tree rhs2 = gimple_assign_rhs2 (stmt); |
1593 | val = bit_value_binop (subcode, | |
e47d81e0 | 1594 | TREE_TYPE (lhs), rhs1, rhs2); |
b7e55469 | 1595 | } |
1596 | break; | |
1597 | ||
1598 | default:; | |
1599 | } | |
912f109f | 1600 | } |
b7e55469 | 1601 | else if (code == GIMPLE_COND) |
1602 | { | |
1603 | enum tree_code code = gimple_cond_code (stmt); | |
1604 | tree rhs1 = gimple_cond_lhs (stmt); | |
1605 | tree rhs2 = gimple_cond_rhs (stmt); | |
1606 | if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
1607 | || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
1608 | val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2); | |
1609 | } | |
153c3b50 | 1610 | else if (code == GIMPLE_CALL |
1611 | && (fndecl = gimple_call_fndecl (stmt)) | |
1612 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) | |
1613 | { | |
1614 | switch (DECL_FUNCTION_CODE (fndecl)) | |
1615 | { | |
1616 | case BUILT_IN_MALLOC: | |
1617 | case BUILT_IN_REALLOC: | |
1618 | case BUILT_IN_CALLOC: | |
1619 | val.lattice_val = CONSTANT; | |
1620 | val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0); | |
1621 | val.mask = shwi_to_double_int | |
1622 | (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT) | |
1623 | / BITS_PER_UNIT - 1)); | |
1624 | break; | |
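	      /* For instance, if MALLOC_ABI_ALIGNMENT were 128 bits
		 (an example value only), the mask would be
		 ~(128 / 8 - 1) = ~15: the low four bits of the
		 returned pointer are known to be zero, and all
		 higher bits stay unknown.  */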
1625 | ||
1626 | case BUILT_IN_ALLOCA: | |
1627 | val.lattice_val = CONSTANT; | |
1628 | val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0); | |
1629 | val.mask = shwi_to_double_int | |
1630 | (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT) | |
1631 | / BITS_PER_UNIT - 1)); | |
1632 | break; | |
1633 | ||
1634 | default:; | |
1635 | } | |
1636 | } | |
b7e55469 | 1637 | is_constant = (val.lattice_val == CONSTANT); |
912f109f | 1638 | } |
1639 | ||
b7e55469 | 1640 | if (!is_constant) |
4ee9c684 | 1641 | { |
1642 | /* The statement produced a nonconstant value. If the statement | |
88dbf20f | 1643 | had UNDEFINED operands, then the result of the statement |
1644 | should be UNDEFINED. Otherwise, the statement is VARYING. */ | |
bfa30570 | 1645 | if (likelyvalue == UNDEFINED) |
b7e55469 | 1646 | { |
1647 | val.lattice_val = likelyvalue; | |
1648 | val.mask = double_int_zero; | |
1649 | } | |
b765fa12 | 1650 | else |
b7e55469 | 1651 | { |
1652 | val.lattice_val = VARYING; | |
1653 | val.mask = double_int_minus_one; | |
1654 | } | |
b765fa12 | 1655 | |
88dbf20f | 1656 | val.value = NULL_TREE; |
4ee9c684 | 1657 | } |
41511585 | 1658 | |
1659 | return val; | |
4ee9c684 | 1660 | } |
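/* As an example of the bitwise tracking above (SSA names invented):
   if x_1 is tracked as CONSTANT with val = 4 and mask = 3, i.e. its
   two low bits are unknown, then evaluating

     y_2 = x_1 | 1;

   yields val = 5 and mask = 2: bit 0 is now known to be one, bit 1 is
   still unknown, and bit 2 is known to be one.  The result stays
   CONSTANT in the lattice even though no single constant value is
   known yet.  */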
1661 | ||
6688f8ec | 1662 | /* Fold the stmt at *GSI with CCP-specific information that propagation | 
1663 | and regular folding do not catch. */ | 
1664 | ||
1665 | static bool | |
1666 | ccp_fold_stmt (gimple_stmt_iterator *gsi) | |
1667 | { | |
1668 | gimple stmt = gsi_stmt (*gsi); | |
6688f8ec | 1669 | |
94144e68 | 1670 | switch (gimple_code (stmt)) |
1671 | { | |
1672 | case GIMPLE_COND: | |
1673 | { | |
1674 | prop_value_t val; | |
1675 | /* Statement evaluation will handle type mismatches in constants | |
1676 | more gracefully than the final propagation. This allows us to | |
1677 | fold more conditionals here. */ | |
1678 | val = evaluate_stmt (stmt); | |
1679 | if (val.lattice_val != CONSTANT | |
b7e55469 | 1680 | || !double_int_zero_p (val.mask)) |
94144e68 | 1681 | return false; |
1682 | ||
b7e55469 | 1683 | if (dump_file) |
1684 | { | |
1685 | fprintf (dump_file, "Folding predicate "); | |
1686 | print_gimple_expr (dump_file, stmt, 0, 0); | |
1687 | fprintf (dump_file, " to "); | |
1688 | print_generic_expr (dump_file, val.value, 0); | |
1689 | fprintf (dump_file, "\n"); | |
1690 | } | |
1691 | ||
94144e68 | 1692 | if (integer_zerop (val.value)) |
1693 | gimple_cond_make_false (stmt); | |
1694 | else | |
1695 | gimple_cond_make_true (stmt); | |
6688f8ec | 1696 | |
94144e68 | 1697 | return true; |
1698 | } | |
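      /* For example (made-up GIMPLE): if evaluate_stmt reduces

	   if (x_1 > 4)

	 to the constant 1 with an all-zero mask, the predicate is
	 rewritten by gimple_cond_make_true into "if (1 != 0)", and a
	 later cfg cleanup removes the dead arm.  */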
6688f8ec | 1699 | |
94144e68 | 1700 | case GIMPLE_CALL: |
1701 | { | |
1702 | tree lhs = gimple_call_lhs (stmt); | |
15d138c9 | 1703 | tree val; |
94144e68 | 1704 | tree argt; |
1705 | bool changed = false; | |
1706 | unsigned i; | |
1707 | ||
1708 | /* If the call was folded into a constant, make sure it goes | 
1709 | away even if we cannot propagate into all uses because of | 
1710 | type issues. */ | 
1711 | if (lhs | |
1712 | && TREE_CODE (lhs) == SSA_NAME | |
15d138c9 | 1713 | && (val = get_constant_value (lhs))) |
94144e68 | 1714 | { |
15d138c9 | 1715 | tree new_rhs = unshare_expr (val); |
338cce8f | 1716 | bool res; |
94144e68 | 1717 | if (!useless_type_conversion_p (TREE_TYPE (lhs), |
1718 | TREE_TYPE (new_rhs))) | |
1719 | new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs); | |
338cce8f | 1720 | res = update_call_from_tree (gsi, new_rhs); |
1721 | gcc_assert (res); | |
94144e68 | 1722 | return true; |
1723 | } | |
1724 | ||
1725 | /* Propagate into the call arguments. Compared to replace_uses_in, | 
1726 | this can use the argument slot types for type verification | 
1727 | instead of the current argument type. We can also safely | 
1728 | drop qualifiers here, as we are dealing with constants anyway. */ | 
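      /* E.g. (invented names): for a call foo (x_1) where x_1 is known
	 to be the constant 42 and the declared parameter type matches,
	 the argument is rewritten in place, giving foo (42).  */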
2de00a2d | 1729 | argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt)); |
94144e68 | 1730 | for (i = 0; i < gimple_call_num_args (stmt) && argt; |
1731 | ++i, argt = TREE_CHAIN (argt)) | |
1732 | { | |
1733 | tree arg = gimple_call_arg (stmt, i); | |
1734 | if (TREE_CODE (arg) == SSA_NAME | |
15d138c9 | 1735 | && (val = get_constant_value (arg)) |
94144e68 | 1736 | && useless_type_conversion_p |
1737 | (TYPE_MAIN_VARIANT (TREE_VALUE (argt)), | |
15d138c9 | 1738 | TYPE_MAIN_VARIANT (TREE_TYPE (val)))) |
94144e68 | 1739 | { |
15d138c9 | 1740 | gimple_call_set_arg (stmt, i, unshare_expr (val)); |
94144e68 | 1741 | changed = true; |
1742 | } | |
1743 | } | |
e16f4c39 | 1744 | |
94144e68 | 1745 | return changed; |
1746 | } | |
6688f8ec | 1747 | |
6872bf3c | 1748 | case GIMPLE_ASSIGN: |
1749 | { | |
1750 | tree lhs = gimple_assign_lhs (stmt); | |
15d138c9 | 1751 | tree val; |
6872bf3c | 1752 | |
1753 | /* If we have a load that turned out to be constant, replace it, | 
1754 | as we cannot propagate into all uses in all cases. */ | 
1755 | if (gimple_assign_single_p (stmt) | |
1756 | && TREE_CODE (lhs) == SSA_NAME | |
15d138c9 | 1757 | && (val = get_constant_value (lhs))) |
6872bf3c | 1758 | { |
15d138c9 | 1759 | tree rhs = unshare_expr (val); |
6872bf3c | 1760 | if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs))) |
182cf5a9 | 1761 | rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs); |
6872bf3c | 1762 | gimple_assign_set_rhs_from_tree (gsi, rhs); |
1763 | return true; | |
1764 | } | |
1765 | ||
1766 | return false; | |
1767 | } | |
1768 | ||
94144e68 | 1769 | default: |
1770 | return false; | |
1771 | } | |
6688f8ec | 1772 | } |
1773 | ||
41511585 | 1774 | /* Visit the assignment statement STMT. Set the value of its LHS to the |
88dbf20f | 1775 | value computed by the RHS and store LHS in *OUTPUT_P. If STMT |
1776 | creates virtual definitions, set the value of each new name to that | |
75a70cf9 | 1777 | of the RHS (if we can derive a constant out of the RHS). |
1778 | Value-returning call statements also perform an assignment, and | |
1779 | are handled here. */ | |
4ee9c684 | 1780 | |
41511585 | 1781 | static enum ssa_prop_result |
75a70cf9 | 1782 | visit_assignment (gimple stmt, tree *output_p) |
4ee9c684 | 1783 | { |
88dbf20f | 1784 | prop_value_t val; |
88dbf20f | 1785 | enum ssa_prop_result retval; |
4ee9c684 | 1786 | |
75a70cf9 | 1787 | tree lhs = gimple_get_lhs (stmt); |
4ee9c684 | 1788 | |
75a70cf9 | 1789 | gcc_assert (gimple_code (stmt) != GIMPLE_CALL |
1790 | || gimple_call_lhs (stmt) != NULL_TREE); | |
1791 | ||
15d138c9 | 1792 | if (gimple_assign_single_p (stmt) |
1793 | && gimple_assign_rhs_code (stmt) == SSA_NAME) | |
1794 | /* For a simple copy operation, we copy the lattice values. */ | |
1795 | val = *get_value (gimple_assign_rhs1 (stmt)); | |
41511585 | 1796 | else |
75a70cf9 | 1797 | /* Evaluate the statement, which could be |
1798 | either a GIMPLE_ASSIGN or a GIMPLE_CALL. */ | |
04236c3a | 1799 | val = evaluate_stmt (stmt); |
4ee9c684 | 1800 | |
88dbf20f | 1801 | retval = SSA_PROP_NOT_INTERESTING; |
4ee9c684 | 1802 | |
41511585 | 1803 | /* Set the lattice value of the statement's output. */ |
88dbf20f | 1804 | if (TREE_CODE (lhs) == SSA_NAME) |
4ee9c684 | 1805 | { |
88dbf20f | 1806 | /* If STMT is an assignment to an SSA_NAME, we only have one |
1807 | value to set. */ | |
1808 | if (set_lattice_value (lhs, val)) | |
1809 | { | |
1810 | *output_p = lhs; | |
1811 | if (val.lattice_val == VARYING) | |
1812 | retval = SSA_PROP_VARYING; | |
1813 | else | |
1814 | retval = SSA_PROP_INTERESTING; | |
1815 | } | |
4ee9c684 | 1816 | } |
88dbf20f | 1817 | |
1818 | return retval; | |
4ee9c684 | 1819 | } |
1820 | ||
4ee9c684 | 1821 | |
41511585 | 1822 | /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING |
1823 | if it can determine which edge will be taken. Otherwise, return | |
1824 | SSA_PROP_VARYING. */ | |
1825 | ||
1826 | static enum ssa_prop_result | |
75a70cf9 | 1827 | visit_cond_stmt (gimple stmt, edge *taken_edge_p) |
4ee9c684 | 1828 | { |
88dbf20f | 1829 | prop_value_t val; |
41511585 | 1830 | basic_block block; |
1831 | ||
75a70cf9 | 1832 | block = gimple_bb (stmt); |
41511585 | 1833 | val = evaluate_stmt (stmt); |
b7e55469 | 1834 | if (val.lattice_val != CONSTANT |
1835 | || !double_int_zero_p (val.mask)) | |
1836 | return SSA_PROP_VARYING; | |
41511585 | 1837 | |
1838 | /* Find which edge out of the conditional block will be taken and add it | |
1839 | to the worklist. If no single edge can be determined statically, | |
1840 | return SSA_PROP_VARYING to feed all the outgoing edges to the | |
1841 | propagation engine. */ | |
b7e55469 | 1842 | *taken_edge_p = find_taken_edge (block, val.value); |
41511585 | 1843 | if (*taken_edge_p) |
1844 | return SSA_PROP_INTERESTING; | |
1845 | else | |
1846 | return SSA_PROP_VARYING; | |
4ee9c684 | 1847 | } |
1848 | ||
4ee9c684 | 1849 | |
41511585 | 1850 | /* Evaluate statement STMT. If the statement produces an output value and |
1851 | its evaluation changes the lattice value of its output, return | |
1852 | SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the | |
1853 | output value. | |
48e1416a | 1854 | |
41511585 | 1855 | If STMT is a conditional branch and we can determine its truth |
1856 | value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying | |
1857 | value, return SSA_PROP_VARYING. */ | |
4ee9c684 | 1858 | |
41511585 | 1859 | static enum ssa_prop_result |
75a70cf9 | 1860 | ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p) |
41511585 | 1861 | { |
41511585 | 1862 | tree def; |
1863 | ssa_op_iter iter; | |
4ee9c684 | 1864 | |
41511585 | 1865 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4ee9c684 | 1866 | { |
88dbf20f | 1867 | fprintf (dump_file, "\nVisiting statement:\n"); |
75a70cf9 | 1868 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
4ee9c684 | 1869 | } |
4ee9c684 | 1870 | |
75a70cf9 | 1871 | switch (gimple_code (stmt)) |
4ee9c684 | 1872 | { |
75a70cf9 | 1873 | case GIMPLE_ASSIGN: |
1874 | /* If the statement is an assignment that produces a single | |
1875 | output value, evaluate its RHS to see if the lattice value of | |
1876 | its output has changed. */ | |
1877 | return visit_assignment (stmt, output_p); | |
1878 | ||
1879 | case GIMPLE_CALL: | |
1880 | /* A value-returning call also performs an assignment. */ | |
1881 | if (gimple_call_lhs (stmt) != NULL_TREE) | |
1882 | return visit_assignment (stmt, output_p); | |
1883 | break; | |
1884 | ||
1885 | case GIMPLE_COND: | |
1886 | case GIMPLE_SWITCH: | |
1887 | /* If STMT is a conditional branch, see if we can determine | |
1888 | which branch will be taken. */ | |
1889 | /* FIXME. It appears that we should be able to optimize | |
1890 | computed GOTOs here as well. */ | |
1891 | return visit_cond_stmt (stmt, taken_edge_p); | |
1892 | ||
1893 | default: | |
1894 | break; | |
4ee9c684 | 1895 | } |
4ee9c684 | 1896 | |
41511585 | 1897 | /* Any other kind of statement is not interesting for constant |
1898 | propagation and, therefore, not worth simulating. */ | |
41511585 | 1899 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1900 | fprintf (dump_file, "No interesting values produced. Marked VARYING.\n"); | |
4ee9c684 | 1901 | |
41511585 | 1902 | /* Definitions made by statements other than assignments to |
1903 | SSA_NAMEs represent unknown modifications to their outputs. | |
1904 | Mark them VARYING. */ | |
88dbf20f | 1905 | FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS) |
1906 | { | |
b7e55469 | 1907 | /* An all-ones mask marks every bit of the result as unknown. */ | 
1907 | prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } }; | 
88dbf20f | 1908 | set_lattice_value (def, v); |
1909 | } | |
4ee9c684 | 1910 | |
41511585 | 1911 | return SSA_PROP_VARYING; |
1912 | } | |
4ee9c684 | 1913 | |
4ee9c684 | 1914 | |
88dbf20f | 1915 | /* Main entry point for SSA Conditional Constant Propagation. */ |
41511585 | 1916 | |
33a34f1e | 1917 | static unsigned int |
61207d43 | 1918 | do_ssa_ccp (void) |
41511585 | 1919 | { |
1920 | ccp_initialize (); | |
1921 | ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node); | |
33a34f1e | 1922 | if (ccp_finalize ()) |
eb9161e7 | 1923 | return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals); |
33a34f1e | 1924 | else |
1925 | return 0; | |
4ee9c684 | 1926 | } |
1927 | ||
5664499b | 1928 | |
1929 | static bool | |
41511585 | 1930 | gate_ccp (void) |
5664499b | 1931 | { |
41511585 | 1932 | return flag_tree_ccp != 0; |
5664499b | 1933 | } |
1934 | ||
4ee9c684 | 1935 | |
48e1416a | 1936 | struct gimple_opt_pass pass_ccp = |
41511585 | 1937 | { |
20099e35 | 1938 | { |
1939 | GIMPLE_PASS, | |
41511585 | 1940 | "ccp", /* name */ |
1941 | gate_ccp, /* gate */ | |
88dbf20f | 1942 | do_ssa_ccp, /* execute */ |
41511585 | 1943 | NULL, /* sub */ |
1944 | NULL, /* next */ | |
1945 | 0, /* static_pass_number */ | |
1946 | TV_TREE_CCP, /* tv_id */ | |
49290934 | 1947 | PROP_cfg | PROP_ssa, /* properties_required */ |
41511585 | 1948 | 0, /* properties_provided */ |
b6246c40 | 1949 | 0, /* properties_destroyed */ |
41511585 | 1950 | 0, /* todo_flags_start */ |
33a34f1e | 1951 | TODO_dump_func | TODO_verify_ssa |
20099e35 | 1952 | | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */ |
1953 | } | |
41511585 | 1954 | }; |
4ee9c684 | 1955 | |
4ee9c684 | 1956 | |
75a70cf9 | 1957 | |
bdd0e199 | 1958 | /* Try to optimize out __builtin_stack_restore. Optimize it out |
1959 | if there is another __builtin_stack_restore in the same basic | |
1960 | block and no calls or ASM_EXPRs are in between, or if this block's | |
1961 | only outgoing edge is to EXIT_BLOCK and there are no calls or | |
1962 | ASM_EXPRs after this __builtin_stack_restore. */ | |
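/* A made-up source pattern this targets ("use" is a placeholder):

     void f (int n)
     {
       { char a[n]; use (a); }
       { char b[n]; use (b); }
     }

   Each variable-length block ends in a __builtin_stack_restore.  The
   first restore is redundant because the second follows in the same
   basic block with no call or asm in between, and the second is
   redundant when only the function exit follows it.  */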
1963 | ||
1964 | static tree | |
75a70cf9 | 1965 | optimize_stack_restore (gimple_stmt_iterator i) |
bdd0e199 | 1966 | { |
6ea999da | 1967 | tree callee; |
1968 | gimple stmt; | |
75a70cf9 | 1969 | |
1970 | basic_block bb = gsi_bb (i); | |
1971 | gimple call = gsi_stmt (i); | |
bdd0e199 | 1972 | |
75a70cf9 | 1973 | if (gimple_code (call) != GIMPLE_CALL |
1974 | || gimple_call_num_args (call) != 1 | |
1975 | || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME | |
1976 | || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0)))) | |
bdd0e199 | 1977 | return NULL_TREE; |
1978 | ||
75a70cf9 | 1979 | for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i)) |
bdd0e199 | 1980 | { |
75a70cf9 | 1981 | stmt = gsi_stmt (i); |
1982 | if (gimple_code (stmt) == GIMPLE_ASM) | |
bdd0e199 | 1983 | return NULL_TREE; |
75a70cf9 | 1984 | if (gimple_code (stmt) != GIMPLE_CALL) |
bdd0e199 | 1985 | continue; |
1986 | ||
75a70cf9 | 1987 | callee = gimple_call_fndecl (stmt); |
c40a6f90 | 1988 | if (!callee |
1989 | || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL | |
1990 | /* All regular builtins are ok, just obviously not alloca. */ | |
1991 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA) | |
bdd0e199 | 1992 | return NULL_TREE; |
1993 | ||
1994 | if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE) | |
6ea999da | 1995 | goto second_stack_restore; |
bdd0e199 | 1996 | } |
1997 | ||
6ea999da | 1998 | if (!gsi_end_p (i)) |
bdd0e199 | 1999 | return NULL_TREE; |
2000 | ||
6ea999da | 2001 | /* Allow zero successors, or a single successor that is the exit block. */ | 
2002 | switch (EDGE_COUNT (bb->succs)) | |
2003 | { | |
2004 | case 0: | |
2005 | break; | |
2006 | case 1: | |
2007 | if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR) | |
2008 | return NULL_TREE; | |
2009 | break; | |
2010 | default: | |
2011 | return NULL_TREE; | |
2012 | } | |
2013 | second_stack_restore: | |
bdd0e199 | 2014 | |
6ea999da | 2015 | /* If there's exactly one use, then zap the call to __builtin_stack_save. |
2016 | If there are multiple uses, then the last one should remove the call. | |
2017 | In any case, whether the call to __builtin_stack_save can be removed | |
2018 | or not is irrelevant to removing the call to __builtin_stack_restore. */ | |
2019 | if (has_single_use (gimple_call_arg (call, 0))) | |
2020 | { | |
2021 | gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0)); | |
2022 | if (is_gimple_call (stack_save)) | |
2023 | { | |
2024 | callee = gimple_call_fndecl (stack_save); | |
2025 | if (callee | |
2026 | && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL | |
2027 | && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE) | |
2028 | { | |
2029 | gimple_stmt_iterator stack_save_gsi; | |
2030 | tree rhs; | |
bdd0e199 | 2031 | |
6ea999da | 2032 | stack_save_gsi = gsi_for_stmt (stack_save); |
2033 | rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0); | |
2034 | update_call_from_tree (&stack_save_gsi, rhs); | |
2035 | } | |
2036 | } | |
2037 | } | |
bdd0e199 | 2038 | |
75a70cf9 | 2039 | /* No effect, so the statement will be deleted. */ |
bdd0e199 | 2040 | return integer_zero_node; |
2041 | } | |
75a70cf9 | 2042 | |
8a58ed0a | 2043 | /* If the va_list type is a simple pointer and nothing special is needed, | 
2044 | optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0), | 
2045 | optimize __builtin_va_end (&ap) away as a no-op, and turn | 
2046 | __builtin_va_copy into a simple pointer assignment. */ | 
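/* Concretely, on such targets this rewrites (example only):

     __builtin_va_start (&ap, 0)   =>  ap = __builtin_next_arg (0);
     __builtin_va_copy (&d, s)     =>  d = s;
     __builtin_va_end (&ap)        =>  (removed as a no-op)
*/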
2047 | ||
2048 | static tree | |
75a70cf9 | 2049 | optimize_stdarg_builtin (gimple call) |
8a58ed0a | 2050 | { |
5f57a8b1 | 2051 | tree callee, lhs, rhs, cfun_va_list; |
8a58ed0a | 2052 | bool va_list_simple_ptr; |
389dd41b | 2053 | location_t loc = gimple_location (call); |
8a58ed0a | 2054 | |
75a70cf9 | 2055 | if (gimple_code (call) != GIMPLE_CALL) |
8a58ed0a | 2056 | return NULL_TREE; |
2057 | ||
75a70cf9 | 2058 | callee = gimple_call_fndecl (call); |
5f57a8b1 | 2059 | |
2060 | cfun_va_list = targetm.fn_abi_va_list (callee); | |
2061 | va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list) | |
2062 | && (TREE_TYPE (cfun_va_list) == void_type_node | |
2063 | || TREE_TYPE (cfun_va_list) == char_type_node); | |
2064 | ||
8a58ed0a | 2065 | switch (DECL_FUNCTION_CODE (callee)) |
2066 | { | |
2067 | case BUILT_IN_VA_START: | |
2068 | if (!va_list_simple_ptr | |
2069 | || targetm.expand_builtin_va_start != NULL | |
75a70cf9 | 2070 | || built_in_decls[BUILT_IN_NEXT_ARG] == NULL) |
8a58ed0a | 2071 | return NULL_TREE; |
2072 | ||
75a70cf9 | 2073 | if (gimple_call_num_args (call) != 2) |
8a58ed0a | 2074 | return NULL_TREE; |
2075 | ||
75a70cf9 | 2076 | lhs = gimple_call_arg (call, 0); |
8a58ed0a | 2077 | if (!POINTER_TYPE_P (TREE_TYPE (lhs)) |
2078 | || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs))) | |
5f57a8b1 | 2079 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
8a58ed0a | 2080 | return NULL_TREE; |
48e1416a | 2081 | |
389dd41b | 2082 | lhs = build_fold_indirect_ref_loc (loc, lhs); |
2083 | rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG], | |
75a70cf9 | 2084 | 1, integer_zero_node); |
389dd41b | 2085 | rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs); |
8a58ed0a | 2086 | return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs); |
2087 | ||
2088 | case BUILT_IN_VA_COPY: | |
2089 | if (!va_list_simple_ptr) | |
2090 | return NULL_TREE; | |
2091 | ||
75a70cf9 | 2092 | if (gimple_call_num_args (call) != 2) |
8a58ed0a | 2093 | return NULL_TREE; |
2094 | ||
75a70cf9 | 2095 | lhs = gimple_call_arg (call, 0); |
8a58ed0a | 2096 | if (!POINTER_TYPE_P (TREE_TYPE (lhs)) |
2097 | || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs))) | |
5f57a8b1 | 2098 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
8a58ed0a | 2099 | return NULL_TREE; |
2100 | ||
389dd41b | 2101 | lhs = build_fold_indirect_ref_loc (loc, lhs); |
75a70cf9 | 2102 | rhs = gimple_call_arg (call, 1); |
8a58ed0a | 2103 | if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs)) |
5f57a8b1 | 2104 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
8a58ed0a | 2105 | return NULL_TREE; |
2106 | ||
389dd41b | 2107 | rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs); |
8a58ed0a | 2108 | return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs); |
2109 | ||
2110 | case BUILT_IN_VA_END: | |
75a70cf9 | 2111 | /* No effect, so the statement will be deleted. */ |
8a58ed0a | 2112 | return integer_zero_node; |
2113 | ||
2114 | default: | |
2115 | gcc_unreachable (); | |
2116 | } | |
2117 | } | |
75a70cf9 | 2118 | |
4ee9c684 | 2119 | /* A simple pass that attempts to fold all builtin functions. This pass |
2120 | is run after we've propagated as many constants as we can. */ | |
2121 | ||
2a1990e9 | 2122 | static unsigned int |
4ee9c684 | 2123 | execute_fold_all_builtins (void) |
2124 | { | |
b36237eb | 2125 | bool cfg_changed = false; |
4ee9c684 | 2126 | basic_block bb; |
b1b7c0c4 | 2127 | unsigned int todoflags = 0; |
48e1416a | 2128 | |
4ee9c684 | 2129 | FOR_EACH_BB (bb) |
2130 | { | |
75a70cf9 | 2131 | gimple_stmt_iterator i; |
2132 | for (i = gsi_start_bb (bb); !gsi_end_p (i); ) | |
4ee9c684 | 2133 | { |
75a70cf9 | 2134 | gimple stmt, old_stmt; |
4ee9c684 | 2135 | tree callee, result; |
0a39fd54 | 2136 | enum built_in_function fcode; |
4ee9c684 | 2137 | |
75a70cf9 | 2138 | stmt = gsi_stmt (i); |
2139 | ||
2140 | if (gimple_code (stmt) != GIMPLE_CALL) | |
0a39fd54 | 2141 | { |
75a70cf9 | 2142 | gsi_next (&i); |
0a39fd54 | 2143 | continue; |
2144 | } | |
75a70cf9 | 2145 | callee = gimple_call_fndecl (stmt); |
4ee9c684 | 2146 | if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL) |
0a39fd54 | 2147 | { |
75a70cf9 | 2148 | gsi_next (&i); |
0a39fd54 | 2149 | continue; |
2150 | } | |
2151 | fcode = DECL_FUNCTION_CODE (callee); | |
4ee9c684 | 2152 | |
2d18b16d | 2153 | result = gimple_fold_builtin (stmt); |
5a4b7e1e | 2154 | |
2155 | if (result) | |
75a70cf9 | 2156 | gimple_remove_stmt_histograms (cfun, stmt); |
5a4b7e1e | 2157 | |
4ee9c684 | 2158 | if (!result) |
2159 | switch (DECL_FUNCTION_CODE (callee)) | |
2160 | { | |
2161 | case BUILT_IN_CONSTANT_P: | |
2162 | /* Resolve __builtin_constant_p. If it hasn't been | |
2163 | folded to integer_one_node by now, it's fairly | |
2164 | certain that the value simply isn't constant. */ | |
75a70cf9 | 2165 | result = integer_zero_node; |
4ee9c684 | 2166 | break; |
2167 | ||
bdd0e199 | 2168 | case BUILT_IN_STACK_RESTORE: |
75a70cf9 | 2169 | result = optimize_stack_restore (i); |
8a58ed0a | 2170 | if (result) |
2171 | break; | |
75a70cf9 | 2172 | gsi_next (&i); |
8a58ed0a | 2173 | continue; |
2174 | ||
2175 | case BUILT_IN_VA_START: | |
2176 | case BUILT_IN_VA_END: | |
2177 | case BUILT_IN_VA_COPY: | |
2178 | /* These shouldn't be folded before pass_stdarg. */ | |
75a70cf9 | 2179 | result = optimize_stdarg_builtin (stmt); |
bdd0e199 | 2180 | if (result) |
2181 | break; | |
2182 | /* FALLTHRU */ | |
2183 | ||
4ee9c684 | 2184 | default: |
75a70cf9 | 2185 | gsi_next (&i); |
4ee9c684 | 2186 | continue; |
2187 | } | |
2188 | ||
2189 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2190 | { | |
2191 | fprintf (dump_file, "Simplified\n "); | |
75a70cf9 | 2192 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
4ee9c684 | 2193 | } |
2194 | ||
75a70cf9 | 2195 | old_stmt = stmt; |
75a70cf9 | 2196 | if (!update_call_from_tree (&i, result)) |
0fefde02 | 2197 | { |
2198 | gimplify_and_update_call_from_tree (&i, result); | |
2199 | todoflags |= TODO_update_address_taken; | |
2200 | } | |
de6ed584 | 2201 | |
75a70cf9 | 2202 | stmt = gsi_stmt (i); |
4c5fd53c | 2203 | update_stmt (stmt); |
de6ed584 | 2204 | |
75a70cf9 | 2205 | if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt) |
2206 | && gimple_purge_dead_eh_edges (bb)) | |
b36237eb | 2207 | cfg_changed = true; |
4ee9c684 | 2208 | |
2209 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2210 | { | |
2211 | fprintf (dump_file, "to\n "); | |
75a70cf9 | 2212 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
4ee9c684 | 2213 | fprintf (dump_file, "\n"); |
2214 | } | |
0a39fd54 | 2215 | |
2216 | /* Retry the same statement if it changed into another | 
75a70cf9 | 2217 | builtin; there might be new opportunities now. */ | 
75a70cf9 | 2218 | if (gimple_code (stmt) != GIMPLE_CALL) |
0a39fd54 | 2219 | { |
75a70cf9 | 2220 | gsi_next (&i); |
0a39fd54 | 2221 | continue; |
2222 | } | |
75a70cf9 | 2223 | callee = gimple_call_fndecl (stmt); |
0a39fd54 | 2224 | if (!callee |
75a70cf9 | 2225 | || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL |
0a39fd54 | 2226 | || DECL_FUNCTION_CODE (callee) == fcode) |
75a70cf9 | 2227 | gsi_next (&i); |
4ee9c684 | 2228 | } |
2229 | } | |
48e1416a | 2230 | |
b36237eb | 2231 | /* Delete unreachable blocks. */ |
b1b7c0c4 | 2232 | if (cfg_changed) |
2233 | todoflags |= TODO_cleanup_cfg; | |
48e1416a | 2234 | |
b1b7c0c4 | 2235 | return todoflags; |
4ee9c684 | 2236 | } |
2237 | ||
41511585 | 2238 | |
48e1416a | 2239 | struct gimple_opt_pass pass_fold_builtins = |
4ee9c684 | 2240 | { |
20099e35 | 2241 | { |
2242 | GIMPLE_PASS, | |
4ee9c684 | 2243 | "fab", /* name */ |
2244 | NULL, /* gate */ | |
2245 | execute_fold_all_builtins, /* execute */ | |
2246 | NULL, /* sub */ | |
2247 | NULL, /* next */ | |
2248 | 0, /* static_pass_number */ | |
0b1615c1 | 2249 | TV_NONE, /* tv_id */ |
49290934 | 2250 | PROP_cfg | PROP_ssa, /* properties_required */ |
4ee9c684 | 2251 | 0, /* properties_provided */ |
2252 | 0, /* properties_destroyed */ | |
2253 | 0, /* todo_flags_start */ | |
909e5ecb | 2254 | TODO_dump_func |
2255 | | TODO_verify_ssa | |
20099e35 | 2256 | | TODO_update_ssa /* todo_flags_finish */ |
2257 | } | |
4ee9c684 | 2258 | }; |