/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000-2014 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
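
   For example (purely illustrative), given

	x_1 = 4;
	y_2 = x_1 + 1;
	if (y_2 > 10)
	  ...

   the pass records x_1 as CONSTANT 4, folds y_2 to CONSTANT 5, and
   marks the true edge of the conditional as not executable, so the
   guarded statements are never simulated.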

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "params.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;
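
/* For example, a CONSTANT lattice value with value == 16 and mask == 3
   encodes a partially known integer: the low two bits are unknown, but
   X & ~3 == 16, so X is one of 16, 17, 18 or 19.  */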

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  */
static prop_value_t *const_val;
static unsigned n_const_val;

static void canonicalize_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
	  || val.mask.is_zero ())
	{
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_generic_expr (outf, val.value, dump_flags);
	}
      else
	{
	  double_int cval = tree_to_double_int (val.value).and_not (val.mask);
	  fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		   prefix, cval.high, cval.low);
	  fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
		   val.mask.high, val.mask.low);
	}
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}


/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (!virtual_operand_p (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	  if (flag_tree_bit_ccp)
	    {
	      double_int nonzero_bits = get_nonzero_bits (var);
	      double_int mask
		= double_int::mask (TYPE_PRECISION (TREE_TYPE (var)));
	      if (nonzero_bits != double_int_minus_one && nonzero_bits != mask)
		{
		  val.lattice_val = CONSTANT;
		  val.value = build_zero_cst (TREE_TYPE (var));
		  /* CCP wants the bits above precision set.  */
		  val.mask = nonzero_bits | ~mask;
		}
	    }
	}
    }
  else if (is_gimple_assign (stmt))
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	{
	  /* Any other variable defined by an assignment is considered
	     UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
    }
  else if ((is_gimple_call (stmt)
	    && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      /* A variable defined by a call or a PHI node is considered
	 UNDEFINED.  */
      val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}


/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL
      || SSA_NAME_VERSION (var) >= n_const_val)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || val->mask.is_zero ()))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
     This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

     and we set value of y to NaN.  This causes value of x to be set to NaN.
     When we later determine that y is in fact VARYING, fold uses the fact
     that HONOR_NANS is false, and we try to change the value of x to 0,
     causing an ICE.  With HONOR_NANS being false, the real appearance of
     NaN would cause undefined behavior, though, so claiming that y (and x)
     are UNDEFINED initially is correct.

  For other constants, make sure to drop TREE_OVERFLOW.  */

static void
canonicalize_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT)
    return;

  if (TREE_OVERFLOW_P (val->value))
    val->value = drop_tree_overflow (val->value);

  if (TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from PHI <&x, not executable> == &x
     to PHI <&x, &y> == common alignment.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return tree_to_double_int (old_val.value).and_not (new_val.mask)
	   == tree_to_double_int (new_val.value).and_not (new_val.mask);

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
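
/* For instance (illustrative): {value 3, mask 0} -> {value 3, mask 4}
   is a valid transition, since only the mask grows and the still-known
   bits agree, whereas {value 3, mask 0} -> {value 5, mask 0} is not,
   because known bits would change.  */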

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = tree_to_double_int (new_val.value)
	     ^ tree_to_double_int (old_val->value);
      new_val.mask = new_val.mask | old_val->mask | diff;
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || new_val.mask != old_val->mask)))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}

static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
			       tree, double_int, double_int,
			       tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}

/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  get_pointer_alignment_1 (expr, &align, &bitpos);
  val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
	      ? double_int::mask (TYPE_PRECISION (type))
	      : double_int_minus_one)
	     .and_not (double_int::from_uhwi (align / BITS_PER_UNIT - 1));
  val.lattice_val = val.mask.is_minus_one () ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value
      = double_int_to_tree (type,
			    double_int::from_uhwi (bitpos / BITS_PER_UNIT));
  else
    val.value = NULL_TREE;

  return val;
}
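
/* For example (illustrative): for the address of a 4-byte-offset field
   in an 8-byte-aligned object, get_pointer_alignment_1 yields
   align == 64 and bitpos == 32 (both in bits), so the low three bits
   of the mask end up clear (known) and val.value is 4: the pointer is
   known to be congruent to 4 modulo 8.  */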

/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  if (has_undefined_operand
      && code == GIMPLE_CALL
      && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
	/* These 3 builtins use the first argument just as a magic
	   way to find out a decl uid.  */
      case IFN_GOMP_SIMD_LANE:
      case IFN_GOMP_SIMD_VF:
      case IFN_GOMP_SIMD_LAST_LANE:
	has_undefined_operand = false;
	break;
      default:
	break;
      }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	case ADDR_EXPR:
	  /* If any part of an address is UNDEFINED, like the index
	     of an ARRAY_EXPR, then treat the result as UNDEFINED.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to CONSTANT.  */
  if (has_undefined_operand)
    return CONSTANT;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
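
/* Illustrative note: with b_2 UNDEFINED, likely_value considers
   a_1 = b_2 + 1 to be UNDEFINED (the undefined operand determines the
   sum), but a_1 = MIN (b_2, c_3) only falls back to CONSTANT, since
   the result may be fully determined by c_3 alone.  */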

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call or a call to a function with
     assume_aligned/alloc_align attribute, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl, fntype = gimple_call_fntype (stmt);
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)
	      && !lookup_attribute ("assume_aligned",
				    TYPE_ATTRIBUTES (fntype))
	      && !lookup_attribute ("alloc_align",
				    TYPE_ATTRIBUTES (fntype))))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  n_const_val = num_ssa_names;
  const_val = XCNEWVEC (prop_value_t, n_const_val);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, we must simulate it
	     at least once; otherwise its outgoing edges would never
	     get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of SSA names to VARYING when
   the total number of SSA names analyzed exceeds the specified debug
   count.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = double_int_minus_one;
	  const_val[i].value = NULL_TREE;
	}
    }
}


/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice or nonzero bits from partially
     constant integers.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      unsigned int tem, align;

      if (!name
	  || (!POINTER_TYPE_P (TREE_TYPE (name))
	      && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
		  /* Don't record nonzero bits before IPA to avoid
		     using too much memory.  */
		  || first_pass_instance)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      if (POINTER_TYPE_P (TREE_TYPE (name)))
	{
	  /* Trailing mask bits specify the alignment, trailing value
	     bits the misalignment.  */
	  tem = val->mask.low;
	  align = (tem & -tem);
	  if (align > 1)
	    set_ptr_info_alignment (get_ptr_info (name), align,
				    (TREE_INT_CST_LOW (val->value)
				     & (align - 1)));
	}
      else
	{
	  double_int nonzero_bits = val->mask;
	  nonzero_bits = nonzero_bits | tree_to_double_int (val->value);
	  nonzero_bits &= get_nonzero_bits (name);
	  set_nonzero_bits (name, nonzero_bits);
	}
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any M UNDEFINED   = any
		any M VARYING     = VARYING
		Ci  M Cj          = Ci       if (i == j)
		Ci  M Cj          = VARYING  if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci       if (i == j)
	 Ci M Cj = VARYING  if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask = val1->mask | val2->mask
		   | (tree_to_double_int (val1->value)
		      ^ tree_to_double_int (val2->value));
      if (val1->mask.is_minus_one ())
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci       if (i == j)
	 Ci M Cj = VARYING  if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
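
/* For example (illustrative): meeting the INTEGER_CSTs 3 and 7 does
   not drop straight to VARYING; the unequal bit is masked instead,
   giving {value 3, mask 4}, i.e. only bit 2 is unknown.  Meeting 0
   and -1 masks every bit and therefore does become VARYING.  */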


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (stmt));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);

    default:
      gcc_unreachable ();
    }
}

/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  double_int *val, double_int *mask,
		  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = ~rval;
      break;

    case NEGATE_EXPR:
      {
	double_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm,
			   type, double_int_one, double_int_zero);
	break;
      }

    CASE_CONVERT:
      {
	bool uns;

	/* First extend mask and value according to the original type.  */
	uns = TYPE_UNSIGNED (rtype);
	*mask = rmask.ext (TYPE_PRECISION (rtype), uns);
	*val = rval.ext (TYPE_PRECISION (rtype), uns);

	/* Then extend mask and value according to the target type.  */
	uns = TYPE_UNSIGNED (type);
	*mask = (*mask).ext (TYPE_PRECISION (type), uns);
	*val = (*val).ext (TYPE_PRECISION (type), uns);
	break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}

/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   double_int *val, double_int *mask,
		   tree r1type, double_int r1val, double_int r1mask,
		   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = TYPE_UNSIGNED (type);
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
      *val = r1val & r2val;
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = (r1mask | r2mask)
	      .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
      *val = r1val | r2val;
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = r1mask | r2mask;
      *val = r1val ^ r2val;
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (r2mask.is_zero ())
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RROTATE_EXPR)
	    shift = -shift;
	  *mask = r1mask.lrotate (shift, TYPE_PRECISION (type));
	  *val = r1val.lrotate (shift, TYPE_PRECISION (type));
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (r2mask.is_zero ())
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RSHIFT_EXPR)
	    shift = -shift;
	  /* We need to know if we are doing a left or a right shift
	     to properly shift in zeros for left shift and unsigned
	     right shifts and the sign bit for signed right shifts.
	     For signed right shifts we shift in varying in case
	     the sign bit was varying.  */
	  if (shift > 0)
	    {
	      *mask = r1mask.llshift (shift, TYPE_PRECISION (type));
	      *val = r1val.llshift (shift, TYPE_PRECISION (type));
	    }
	  else if (shift < 0)
	    {
	      shift = -shift;
	      *mask = r1mask.rshift (shift, TYPE_PRECISION (type), !uns);
	      *val = r1val.rshift (shift, TYPE_PRECISION (type), !uns);
	    }
	  else
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	double_int lo, hi;
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
	lo = lo.ext (TYPE_PRECISION (type), uns);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	hi = (r1val | r1mask) + (r2val | r2mask);
	hi = hi.ext (TYPE_PRECISION (type), uns);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = r1mask | r2mask | (lo ^ hi);
	*mask = (*mask).ext (TYPE_PRECISION (type), uns);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }
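
      /* Worked example (illustrative): for r1 = {value 4, mask 3}
	 (one of 4..7) and r2 = {value 8, mask 0}, lo = 4 + 8 = 12 and
	 hi = 7 + 8 = 15, so *mask = 3 | 0 | (12 ^ 15) = 3 and
	 *val = 12: the sum is known to lie in 12..15.  */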

    case MINUS_EXPR:
      {
	double_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = (r1val | r1mask).trailing_zeros ();
	int r2tz = (r2val | r2mask).trailing_zeros ();
	if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = ~double_int::mask (r1tz + r2tz);
	    *mask = (*mask).ext (TYPE_PRECISION (type), uns);
	    *val = double_int_zero;
	  }
	break;
      }
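
      /* Worked example (illustrative): if r1 is known to be a multiple
	 of 4 (two trailing zero bits) and r2 a multiple of 2, the
	 product has at least three trailing zero bits: *val = 0 with
	 only the low three mask bits clear.  */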

    case EQ_EXPR:
    case NE_EXPR:
      {
	double_int m = r1mask | r2mask;
	if (r1val.and_not (m) != r2val.and_not (m))
	  {
	    *mask = double_int_zero;
	    *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }
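
      /* Worked example (illustrative): comparing r1 = {value 4, mask 3}
	 with r2 = {value 8, mask 0}, the bits outside the combined mask
	 already differ (4 vs. 8), so EQ_EXPR folds to 0 and NE_EXPR to 1
	 even though r1 is only partially known.  */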

    case GE_EXPR:
    case GT_EXPR:
      {
	double_int tem = r1val;
	r1val = r2val;
	r2val = tem;
	tem = r1mask;
	r1mask = r2mask;
	r2mask = tem;
	code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;
	/* If the most significant bits are not known we know nothing.  */
	if (r1mask.is_negative () || r2mask.is_negative ())
	  break;

	/* For comparisons the signedness is in the comparison operands.  */
	uns = TYPE_UNSIGNED (r1type);

	/* If we know the most significant bits we know the value
	   ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = (r1val | r1mask).cmp (r2val.and_not (r2mask), uns);
	minmax = r1val.and_not (r1mask).cmp (r2val | r2mask, uns);
	if (maxmin < 0)  /* r1 is less than r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_one;
	  }
	else if (minmax > 0)  /* r1 is not less or equal to r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (maxmin == minmax)  /* r1 and r2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = double_int_zero;
	    *val = (code == LE_EXPR ? double_int_one : double_int_zero);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    default:;
    }
}

/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;

  if (rval.lattice_val == UNDEFINED)
    return rval;

  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || rval.mask.is_minus_one ());
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!mask.is_minus_one ())
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;

  if (r1val.lattice_val == UNDEFINED
      || r2val.lattice_val == UNDEFINED)
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      return val;
    }

  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || r1val.mask.is_minus_one ());
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || r2val.mask.is_minus_one ());
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!mask.is_minus_one ())
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
1483 | ||
237e78b1 | 1484 | /* Return the propagation value for __builtin_assume_aligned |
1485 | and functions with assume_aligned or alloc_aligned attribute. | |
1486 | For __builtin_assume_aligned, ATTR is NULL_TREE, | |
1487 | for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED | |
1488 | is false, for alloc_aligned attribute ATTR is non-NULL and | |
1489 | ALLOC_ALIGNED is true. */ | |
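/* A sketch of the effect, with made-up SSA names:

     p_2 = __builtin_assume_aligned (p_1, 16, 4);

   gives ALIGN 16 and MISALIGN 4, so p_2 gets a CONSTANT lattice value
   with mask -16 and value 4: the four low bits of p_2 are known to be
   0100 while all higher bits stay unknown, i.e. p_2 is congruent to 4
   modulo 16.  */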
fca0886c | 1490 | |
1491 | static prop_value_t | |
237e78b1 | 1492 | bit_value_assume_aligned (gimple stmt, tree attr, prop_value_t ptrval, |
1493 | bool alloc_aligned) | |
fca0886c | 1494 | { |
237e78b1 | 1495 | tree align, misalign = NULL_TREE, type; |
fca0886c | 1496 | unsigned HOST_WIDE_INT aligni, misaligni = 0; |
fca0886c | 1497 | prop_value_t alignval; |
1498 | double_int value, mask; | |
1499 | prop_value_t val; | |
237e78b1 | 1500 | |
1501 | if (attr == NULL_TREE) | |
1502 | { | |
1503 | tree ptr = gimple_call_arg (stmt, 0); | |
1504 | type = TREE_TYPE (ptr); | |
1505 | ptrval = get_value_for_expr (ptr, true); | |
1506 | } | |
1507 | else | |
1508 | { | |
1509 | tree lhs = gimple_call_lhs (stmt); | |
1510 | type = TREE_TYPE (lhs); | |
1511 | } | |
1512 | ||
fca0886c | 1513 | if (ptrval.lattice_val == UNDEFINED) |
1514 | return ptrval; | |
1515 | gcc_assert ((ptrval.lattice_val == CONSTANT | |
1516 | && TREE_CODE (ptrval.value) == INTEGER_CST) | |
cf8f0e63 | 1517 | || ptrval.mask.is_minus_one ()); |
237e78b1 | 1518 | if (attr == NULL_TREE) |
fca0886c | 1519 | { |
237e78b1 | 1520 | /* Get aligni and misaligni from __builtin_assume_aligned. */ |
1521 | align = gimple_call_arg (stmt, 1); | |
1522 | if (!tree_fits_uhwi_p (align)) | |
fca0886c | 1523 | return ptrval; |
237e78b1 | 1524 | aligni = tree_to_uhwi (align); |
1525 | if (gimple_call_num_args (stmt) > 2) | |
1526 | { | |
1527 | misalign = gimple_call_arg (stmt, 2); | |
1528 | if (!tree_fits_uhwi_p (misalign)) | |
1529 | return ptrval; | |
1530 | misaligni = tree_to_uhwi (misalign); | |
1531 | } | |
1532 | } | |
1533 | else | |
1534 | { | |
1535 | /* Get aligni and misaligni from assume_aligned or | |
1536 | alloc_align attributes. */ | |
1537 | if (TREE_VALUE (attr) == NULL_TREE) | |
1538 | return ptrval; | |
1539 | attr = TREE_VALUE (attr); | |
1540 | align = TREE_VALUE (attr); | |
1541 | if (!tree_fits_uhwi_p (align)) | |
fca0886c | 1542 | return ptrval; |
237e78b1 | 1543 | aligni = tree_to_uhwi (align); |
1544 | if (alloc_aligned) | |
1545 | { | |
1546 | if (aligni == 0 || aligni > gimple_call_num_args (stmt)) | |
1547 | return ptrval; | |
1548 | align = gimple_call_arg (stmt, aligni - 1); | |
1549 | if (!tree_fits_uhwi_p (align)) | |
1550 | return ptrval; | |
1551 | aligni = tree_to_uhwi (align); | |
1552 | } | |
1553 | else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr))) | |
1554 | { | |
1555 | misalign = TREE_VALUE (TREE_CHAIN (attr)); | |
1556 | if (!tree_fits_uhwi_p (misalign)) | |
1557 | return ptrval; | |
1558 | misaligni = tree_to_uhwi (misalign); | |
1559 | } | |
fca0886c | 1560 | } |
237e78b1 | 1561 | if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni) |
1562 | return ptrval; | |
1563 | ||
fca0886c | 1564 | align = build_int_cst_type (type, -aligni); |
1565 | alignval = get_value_for_expr (align, true); | |
1566 | bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask, | |
1567 | type, value_to_double_int (ptrval), ptrval.mask, | |
1568 | type, value_to_double_int (alignval), alignval.mask); | |
cf8f0e63 | 1569 | if (!mask.is_minus_one ()) |
fca0886c | 1570 | { |
1571 | val.lattice_val = CONSTANT; | |
1572 | val.mask = mask; | |
1573 | gcc_assert ((mask.low & (aligni - 1)) == 0); | |
1574 | gcc_assert ((value.low & (aligni - 1)) == 0); | |
1575 | value.low |= misaligni; | |
1576 | /* ??? Delay building trees here. */ | |
1577 | val.value = double_int_to_tree (type, value); | |
1578 | } | |
1579 | else | |
1580 | { | |
1581 | val.lattice_val = VARYING; | |
1582 | val.value = NULL_TREE; | |
1583 | val.mask = double_int_minus_one; | |
1584 | } | |
1585 | return val; | |
1586 | } | |
1587 | ||
75a70cf9 | 1588 | /* Evaluate statement STMT. |
1589 | Valid only for assignments, calls, conditionals, and switches. */ | |
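/* A small example of what the bitwise tracking below can prove while
   plain folding cannot (SSA names are made up):

     y_2 = x_1 | 1;
     if (y_2 != 0) ...

   The IOR gives y_2 value 1 with mask ~1 -- its low bit is known to be
   set -- so the comparison against zero evaluates to constant true
   even though x_1 is completely unknown.  */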
4ee9c684 | 1590 | |
88dbf20f | 1591 | static prop_value_t |
75a70cf9 | 1592 | evaluate_stmt (gimple stmt) |
4ee9c684 | 1593 | { |
88dbf20f | 1594 | prop_value_t val; |
4f61cce6 | 1595 | tree simplified = NULL_TREE; |
88dbf20f | 1596 | ccp_lattice_t likelyvalue = likely_value (stmt); |
b7e55469 | 1597 | bool is_constant = false; |
581bf1c2 | 1598 | unsigned int align; |
88dbf20f | 1599 | |
b7e55469 | 1600 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1601 | { | |
1602 | fprintf (dump_file, "which is likely "); | |
1603 | switch (likelyvalue) | |
1604 | { | |
1605 | case CONSTANT: | |
1606 | fprintf (dump_file, "CONSTANT"); | |
1607 | break; | |
1608 | case UNDEFINED: | |
1609 | fprintf (dump_file, "UNDEFINED"); | |
1610 | break; | |
1611 | case VARYING: | |
1612 | fprintf (dump_file, "VARYING"); | |
1613 | break; | |
1614 | default:; | |
1615 | } | |
1616 | fprintf (dump_file, "\n"); | |
1617 | } | |
add6ee5e | 1618 | |
4ee9c684 | 1619 | /* If the statement is likely to have a CONSTANT result, then try |
1620 | to fold the statement to determine the constant value. */ | |
75a70cf9 | 1621 | /* FIXME. This is the only place that we call ccp_fold.
1622 | Since likely_value never returns CONSTANT for calls, we will
1623 | not attempt to fold them, even builtins that could profit from folding. */
4ee9c684 | 1624 | if (likelyvalue == CONSTANT) |
b7e55469 | 1625 | { |
1626 | fold_defer_overflow_warnings (); | |
1627 | simplified = ccp_fold (stmt); | |
1628 | is_constant = simplified && is_gimple_min_invariant (simplified); | |
1629 | fold_undefer_overflow_warnings (is_constant, stmt, 0); | |
1630 | if (is_constant) | |
1631 | { | |
1632 | /* The statement produced a constant value. */ | |
1633 | val.lattice_val = CONSTANT; | |
1634 | val.value = simplified; | |
1635 | val.mask = double_int_zero; | |
1636 | } | |
1637 | } | |
4ee9c684 | 1638 | /* If the statement is likely to have a VARYING result, then do not |
1639 | bother folding the statement. */ | |
04236c3a | 1640 | else if (likelyvalue == VARYING) |
75a70cf9 | 1641 | { |
590c3166 | 1642 | enum gimple_code code = gimple_code (stmt); |
75a70cf9 | 1643 | if (code == GIMPLE_ASSIGN) |
1644 | { | |
1645 | enum tree_code subcode = gimple_assign_rhs_code (stmt); | |
48e1416a | 1646 | |
75a70cf9 | 1647 | /* Other cases cannot satisfy is_gimple_min_invariant |
1648 | without folding. */ | |
1649 | if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS) | |
1650 | simplified = gimple_assign_rhs1 (stmt); | |
1651 | } | |
1652 | else if (code == GIMPLE_SWITCH) | |
1653 | simplified = gimple_switch_index (stmt); | |
1654 | else | |
a65c4d64 | 1655 | /* These cannot satisfy is_gimple_min_invariant without folding. */ |
1656 | gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND); | |
b7e55469 | 1657 | is_constant = simplified && is_gimple_min_invariant (simplified); |
1658 | if (is_constant) | |
1659 | { | |
1660 | /* The statement produced a constant value. */ | |
1661 | val.lattice_val = CONSTANT; | |
1662 | val.value = simplified; | |
1663 | val.mask = double_int_zero; | |
1664 | } | |
75a70cf9 | 1665 | } |
4ee9c684 | 1666 | |
b7e55469 | 1667 | /* Resort to simplification for bitwise tracking. */ |
1668 | if (flag_tree_bit_ccp | |
939514e9 | 1669 | && (likelyvalue == CONSTANT || is_gimple_call (stmt)) |
b7e55469 | 1670 | && !is_constant) |
912f109f | 1671 | { |
b7e55469 | 1672 | enum gimple_code code = gimple_code (stmt); |
1673 | val.lattice_val = VARYING; | |
1674 | val.value = NULL_TREE; | |
1675 | val.mask = double_int_minus_one; | |
1676 | if (code == GIMPLE_ASSIGN) | |
912f109f | 1677 | { |
b7e55469 | 1678 | enum tree_code subcode = gimple_assign_rhs_code (stmt); |
1679 | tree rhs1 = gimple_assign_rhs1 (stmt); | |
1680 | switch (get_gimple_rhs_class (subcode)) | |
1681 | { | |
1682 | case GIMPLE_SINGLE_RHS: | |
1683 | if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
1684 | || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
1685 | val = get_value_for_expr (rhs1, true); | |
1686 | break; | |
1687 | ||
1688 | case GIMPLE_UNARY_RHS: | |
1689 | if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
1690 | || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
1691 | && (INTEGRAL_TYPE_P (gimple_expr_type (stmt)) | |
1692 | || POINTER_TYPE_P (gimple_expr_type (stmt)))) | |
1693 | val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1); | |
1694 | break; | |
1695 | ||
1696 | case GIMPLE_BINARY_RHS: | |
1697 | if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
1698 | || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
1699 | { | |
e47d81e0 | 1700 | tree lhs = gimple_assign_lhs (stmt); |
b7e55469 | 1701 | tree rhs2 = gimple_assign_rhs2 (stmt); |
1702 | val = bit_value_binop (subcode, | |
e47d81e0 | 1703 | TREE_TYPE (lhs), rhs1, rhs2); |
b7e55469 | 1704 | } |
1705 | break; | |
1706 | ||
1707 | default:; | |
1708 | } | |
912f109f | 1709 | } |
b7e55469 | 1710 | else if (code == GIMPLE_COND) |
1711 | { | |
1712 | enum tree_code code = gimple_cond_code (stmt); | |
1713 | tree rhs1 = gimple_cond_lhs (stmt); | |
1714 | tree rhs2 = gimple_cond_rhs (stmt); | |
1715 | if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
1716 | || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
1717 | val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2); | |
1718 | } | |
0b4f0116 | 1719 | else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)) |
153c3b50 | 1720 | { |
0b4f0116 | 1721 | tree fndecl = gimple_call_fndecl (stmt); |
153c3b50 | 1722 | switch (DECL_FUNCTION_CODE (fndecl)) |
1723 | { | |
1724 | case BUILT_IN_MALLOC: | |
1725 | case BUILT_IN_REALLOC: | |
1726 | case BUILT_IN_CALLOC: | |
939514e9 | 1727 | case BUILT_IN_STRDUP: |
1728 | case BUILT_IN_STRNDUP: | |
153c3b50 | 1729 | val.lattice_val = CONSTANT; |
1730 | val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0); | |
cf8f0e63 | 1731 | val.mask = double_int::from_shwi |
153c3b50 | 1732 | (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT) |
1733 | / BITS_PER_UNIT - 1)); | |
1734 | break; | |
1735 | ||
1736 | case BUILT_IN_ALLOCA: | |
581bf1c2 | 1737 | case BUILT_IN_ALLOCA_WITH_ALIGN: |
1738 | align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN | |
1739 | ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)) | |
1740 | : BIGGEST_ALIGNMENT); | |
153c3b50 | 1741 | val.lattice_val = CONSTANT; |
1742 | val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0); | |
cf8f0e63 | 1743 | val.mask = double_int::from_shwi (~(((HOST_WIDE_INT) align) |
1744 | / BITS_PER_UNIT - 1)); | |
153c3b50 | 1745 | break; |
1746 | ||
939514e9 | 1747 | /* These builtins return their first argument, unmodified. */ |
1748 | case BUILT_IN_MEMCPY: | |
1749 | case BUILT_IN_MEMMOVE: | |
1750 | case BUILT_IN_MEMSET: | |
1751 | case BUILT_IN_STRCPY: | |
1752 | case BUILT_IN_STRNCPY: | |
1753 | case BUILT_IN_MEMCPY_CHK: | |
1754 | case BUILT_IN_MEMMOVE_CHK: | |
1755 | case BUILT_IN_MEMSET_CHK: | |
1756 | case BUILT_IN_STRCPY_CHK: | |
1757 | case BUILT_IN_STRNCPY_CHK: | |
1758 | val = get_value_for_expr (gimple_call_arg (stmt, 0), true); | |
1759 | break; | |
1760 | ||
fca0886c | 1761 | case BUILT_IN_ASSUME_ALIGNED: |
237e78b1 | 1762 | val = bit_value_assume_aligned (stmt, NULL_TREE, val, false); |
fca0886c | 1763 | break; |
1764 | ||
060fc206 | 1765 | case BUILT_IN_ALIGNED_ALLOC: |
1766 | { | |
1767 | tree align = get_constant_value (gimple_call_arg (stmt, 0)); | |
1768 | if (align | |
1769 | && tree_fits_uhwi_p (align)) | |
1770 | { | |
1771 | unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align); | |
1772 | if (aligni > 1 | |
1773 | /* align must be a power of two. */
1774 | && (aligni & (aligni - 1)) == 0) | |
1775 | { | |
1776 | val.lattice_val = CONSTANT; | |
1777 | val.value = build_int_cst (ptr_type_node, 0); | |
1778 | val.mask = double_int::from_shwi (-aligni); | |
1779 | } | |
1780 | } | |
1781 | break; | |
1782 | } | |
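/* Illustration of the alignment encoding used by the cases above
   (numbers are examples only): for aligned_alloc (64, n) the result
   gets value 0 and mask -64, meaning the six low bits of the returned
   pointer are known to be zero while all higher bits are unknown.  */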
1783 | ||
153c3b50 | 1784 | default:; |
1785 | } | |
1786 | } | |
237e78b1 | 1787 | if (is_gimple_call (stmt) && gimple_call_lhs (stmt)) |
1788 | { | |
1789 | tree fntype = gimple_call_fntype (stmt); | |
1790 | if (fntype) | |
1791 | { | |
1792 | tree attrs = lookup_attribute ("assume_aligned", | |
1793 | TYPE_ATTRIBUTES (fntype)); | |
1794 | if (attrs) | |
1795 | val = bit_value_assume_aligned (stmt, attrs, val, false); | |
1796 | attrs = lookup_attribute ("alloc_align", | |
1797 | TYPE_ATTRIBUTES (fntype)); | |
1798 | if (attrs) | |
1799 | val = bit_value_assume_aligned (stmt, attrs, val, true); | |
1800 | } | |
1801 | } | |
b7e55469 | 1802 | is_constant = (val.lattice_val == CONSTANT); |
912f109f | 1803 | } |
1804 | ||
fc08b993 | 1805 | if (flag_tree_bit_ccp |
1806 | && ((is_constant && TREE_CODE (val.value) == INTEGER_CST) | |
1807 | || (!is_constant && likelyvalue != UNDEFINED)) | |
1808 | && gimple_get_lhs (stmt) | |
1809 | && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME) | |
1810 | { | |
1811 | tree lhs = gimple_get_lhs (stmt); | |
1812 | double_int nonzero_bits = get_nonzero_bits (lhs); | |
1813 | double_int mask = double_int::mask (TYPE_PRECISION (TREE_TYPE (lhs))); | |
1814 | if (nonzero_bits != double_int_minus_one && nonzero_bits != mask) | |
1815 | { | |
1816 | if (!is_constant) | |
1817 | { | |
1818 | val.lattice_val = CONSTANT; | |
1819 | val.value = build_zero_cst (TREE_TYPE (lhs)); | |
1820 | /* CCP wants the bits above precision set. */ | |
1821 | val.mask = nonzero_bits | ~mask; | |
1822 | is_constant = true; | |
1823 | } | |
1824 | else | |
1825 | { | |
1826 | double_int valv = tree_to_double_int (val.value); | |
1827 | if (!(valv & ~nonzero_bits & mask).is_zero ()) | |
1828 | val.value = double_int_to_tree (TREE_TYPE (lhs), | |
1829 | valv & nonzero_bits); | |
1830 | if (nonzero_bits.is_zero ()) | |
1831 | val.mask = double_int_zero; | |
1832 | else | |
1833 | val.mask = val.mask & (nonzero_bits | ~mask); | |
1834 | } | |
1835 | } | |
1836 | } | |
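/* For instance (illustrative): if the recorded range information says
   nonzero_bits == 1 for the LHS, a previously VARYING value becomes
   CONSTANT 0 with only the low bit (plus the bits above the precision)
   unknown, and an already constant value has any bits outside
   nonzero_bits cleared from both its value and its mask.  */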
1837 | ||
b7e55469 | 1838 | if (!is_constant) |
4ee9c684 | 1839 | { |
1840 | /* The statement produced a nonconstant value. If the statement | |
88dbf20f | 1841 | had UNDEFINED operands, then the result of the statement |
1842 | should be UNDEFINED. Otherwise, the statement is VARYING. */ | |
bfa30570 | 1843 | if (likelyvalue == UNDEFINED) |
b7e55469 | 1844 | { |
1845 | val.lattice_val = likelyvalue; | |
1846 | val.mask = double_int_zero; | |
1847 | } | |
b765fa12 | 1848 | else |
b7e55469 | 1849 | { |
1850 | val.lattice_val = VARYING; | |
1851 | val.mask = double_int_minus_one; | |
1852 | } | |
b765fa12 | 1853 | |
88dbf20f | 1854 | val.value = NULL_TREE; |
4ee9c684 | 1855 | } |
41511585 | 1856 | |
1857 | return val; | |
4ee9c684 | 1858 | } |
1859 | ||
de6bd75e | 1860 | typedef hash_table <pointer_hash <gimple_statement_base> > gimple_htab; |
2b15d2ba | 1861 | |
582a80ed | 1862 | /* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before |
1863 | each matching BUILT_IN_STACK_RESTORE. Mark visited PHI nodes in VISITED. */
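/* Illustrative GIMPLE shape of the result (not from a real dump):

     saved_val = __builtin_stack_save ();
     ...
     var ={v} {CLOBBER};                   <-- inserted clobber
     __builtin_stack_restore (saved_val);

   The walk also follows PHI nodes and SSA copies of SAVED_VAL, so
   restores reached through them get the clobber as well.  */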
1864 | ||
1865 | static void | |
2b15d2ba | 1866 | insert_clobber_before_stack_restore (tree saved_val, tree var, |
1867 | gimple_htab *visited) | |
582a80ed | 1868 | { |
1869 | gimple stmt, clobber_stmt; | |
1870 | tree clobber; | |
1871 | imm_use_iterator iter; | |
1872 | gimple_stmt_iterator i; | |
1873 | gimple *slot; | |
1874 | ||
1875 | FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val) | |
1876 | if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE)) | |
1877 | { | |
f1f41a6c | 1878 | clobber = build_constructor (TREE_TYPE (var), |
1879 | NULL); | |
582a80ed | 1880 | TREE_THIS_VOLATILE (clobber) = 1; |
1881 | clobber_stmt = gimple_build_assign (var, clobber); | |
1882 | ||
1883 | i = gsi_for_stmt (stmt); | |
1884 | gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT); | |
1885 | } | |
1886 | else if (gimple_code (stmt) == GIMPLE_PHI) | |
1887 | { | |
2b15d2ba | 1888 | if (!visited->is_created ()) |
1889 | visited->create (10); | |
582a80ed | 1890 | |
2b15d2ba | 1891 | slot = visited->find_slot (stmt, INSERT); |
582a80ed | 1892 | if (*slot != NULL) |
1893 | continue; | |
1894 | ||
1895 | *slot = stmt; | |
1896 | insert_clobber_before_stack_restore (gimple_phi_result (stmt), var, | |
1897 | visited); | |
1898 | } | |
42eed683 | 1899 | else if (gimple_assign_ssa_name_copy_p (stmt)) |
1900 | insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var, | |
1901 | visited); | |
582a80ed | 1902 | else |
1903 | gcc_assert (is_gimple_debug (stmt)); | |
1904 | } | |
1905 | ||
1906 | /* Advance the iterator to the previous non-debug gimple statement in the same | |
1907 | or dominating basic block. */ | |
1908 | ||
1909 | static inline void | |
1910 | gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i) | |
1911 | { | |
1912 | basic_block dom; | |
1913 | ||
1914 | gsi_prev_nondebug (i); | |
1915 | while (gsi_end_p (*i)) | |
1916 | { | |
1917 | dom = get_immediate_dominator (CDI_DOMINATORS, i->bb); | |
34154e27 | 1918 | if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun)) |
582a80ed | 1919 | return; |
1920 | ||
1921 | *i = gsi_last_bb (dom); | |
1922 | } | |
1923 | } | |
1924 | ||
1925 | /* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert | |
1543f720 | 1926 | a clobber of VAR before each matching BUILT_IN_STACK_RESTORE. |
1927 | ||
1928 | It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
1929 | previous pass (such as DOM) duplicated it along multiple paths to a BB. In | |
1930 | that case the function gives up without inserting the clobbers. */ | |
582a80ed | 1931 | |
1932 | static void | |
1933 | insert_clobbers_for_var (gimple_stmt_iterator i, tree var) | |
1934 | { | |
582a80ed | 1935 | gimple stmt; |
1936 | tree saved_val; | |
2b15d2ba | 1937 | gimple_htab visited; |
582a80ed | 1938 | |
1543f720 | 1939 | for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i)) |
582a80ed | 1940 | { |
1941 | stmt = gsi_stmt (i); | |
1942 | ||
1943 | if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE)) | |
1944 | continue; | |
582a80ed | 1945 | |
1946 | saved_val = gimple_call_lhs (stmt); | |
1947 | if (saved_val == NULL_TREE) | |
1948 | continue; | |
1949 | ||
1950 | insert_clobber_before_stack_restore (saved_val, var, &visited); | |
1951 | break; | |
1952 | } | |
1953 | ||
2b15d2ba | 1954 | if (visited.is_created ()) |
1955 | visited.dispose (); | |
582a80ed | 1956 | } |
1957 | ||
581bf1c2 | 1958 | /* Detects a __builtin_alloca_with_align with constant size argument. Declares
1959 | a fixed-size array and returns its address; if no such alloca is found,
1960 | returns NULL_TREE. */
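/* For example (illustrative, assuming the size passes the threshold
   check below):

     p_1 = __builtin_alloca_with_align (16, 32);

   is folded to the address of a fresh local array

     unsigned char D.tmp[16];              <-- DECL_ALIGN of 32 bits
     p_1 = &D.tmp;                         <-- converted to p_1's type

   where p_1 and D.tmp are made-up names.  */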
9a65cc0a | 1961 | |
1962 | static tree | |
581bf1c2 | 1963 | fold_builtin_alloca_with_align (gimple stmt) |
9a65cc0a | 1964 | { |
1965 | unsigned HOST_WIDE_INT size, threshold, n_elem; | |
1966 | tree lhs, arg, block, var, elem_type, array_type; | |
9a65cc0a | 1967 | |
1968 | /* Get lhs. */ | |
1969 | lhs = gimple_call_lhs (stmt); | |
1970 | if (lhs == NULL_TREE) | |
1971 | return NULL_TREE; | |
1972 | ||
1973 | /* Detect constant argument. */ | |
1974 | arg = get_constant_value (gimple_call_arg (stmt, 0)); | |
6e93d308 | 1975 | if (arg == NULL_TREE |
1976 | || TREE_CODE (arg) != INTEGER_CST | |
cd4547bf | 1977 | || !tree_fits_uhwi_p (arg)) |
9a65cc0a | 1978 | return NULL_TREE; |
6e93d308 | 1979 | |
8c53c46c | 1980 | size = tree_to_uhwi (arg); |
9a65cc0a | 1981 | |
581bf1c2 | 1982 | /* Heuristic: don't fold large allocas. */ |
9a65cc0a | 1983 | threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME); |
581bf1c2 | 1984 | /* In case the alloca is located at function entry, it has the same lifetime |
1985 | as a declared array, so we allow a larger size. */ | |
9a65cc0a | 1986 | block = gimple_block (stmt); |
1987 | if (!(cfun->after_inlining | |
1988 | && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)) | |
1989 | threshold /= 10; | |
1990 | if (size > threshold) | |
1991 | return NULL_TREE; | |
1992 | ||
1993 | /* Declare array. */ | |
1994 | elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1); | |
1995 | n_elem = size * 8 / BITS_PER_UNIT; | |
9a65cc0a | 1996 | array_type = build_array_type_nelts (elem_type, n_elem); |
1997 | var = create_tmp_var (array_type, NULL); | |
581bf1c2 | 1998 | DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)); |
3d4a0a4b | 1999 | { |
2000 | struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs); | |
2001 | if (pi != NULL && !pi->pt.anything) | |
2002 | { | |
2003 | bool singleton_p; | |
2004 | unsigned uid; | |
2005 | singleton_p = pt_solution_singleton_p (&pi->pt, &uid); | |
2006 | gcc_assert (singleton_p); | |
2007 | SET_DECL_PT_UID (var, uid); | |
2008 | } | |
2009 | } | |
9a65cc0a | 2010 | |
2011 | /* Fold alloca to the address of the array. */ | |
2012 | return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var)); | |
2013 | } | |
2014 | ||
6688f8ec | 2015 | /* Fold the stmt at *GSI with CCP specific information that propagating |
2016 | and regular folding does not catch. */ | |
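/* A case this catches (illustrative): suppose bit tracking proved
   y_2 = x_1 | 4 where only the two low bits of x_1 can be set, so y_2
   has value 4 and mask 3, i.e. the range [4, 7].  A predicate

     if (y_2 > 3)

   then evaluates to a constant with a zero mask and is rewritten to
   its taken form below.  */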
2017 | ||
2018 | static bool | |
2019 | ccp_fold_stmt (gimple_stmt_iterator *gsi) | |
2020 | { | |
2021 | gimple stmt = gsi_stmt (*gsi); | |
6688f8ec | 2022 | |
94144e68 | 2023 | switch (gimple_code (stmt)) |
2024 | { | |
2025 | case GIMPLE_COND: | |
2026 | { | |
2027 | prop_value_t val; | |
2028 | /* Statement evaluation will handle type mismatches in constants | |
2029 | more gracefully than the final propagation. This allows us to | |
2030 | fold more conditionals here. */ | |
2031 | val = evaluate_stmt (stmt); | |
2032 | if (val.lattice_val != CONSTANT | |
cf8f0e63 | 2033 | || !val.mask.is_zero ()) |
94144e68 | 2034 | return false; |
2035 | ||
b7e55469 | 2036 | if (dump_file) |
2037 | { | |
2038 | fprintf (dump_file, "Folding predicate "); | |
2039 | print_gimple_expr (dump_file, stmt, 0, 0); | |
2040 | fprintf (dump_file, " to "); | |
2041 | print_generic_expr (dump_file, val.value, 0); | |
2042 | fprintf (dump_file, "\n"); | |
2043 | } | |
2044 | ||
94144e68 | 2045 | if (integer_zerop (val.value)) |
2046 | gimple_cond_make_false (stmt); | |
2047 | else | |
2048 | gimple_cond_make_true (stmt); | |
6688f8ec | 2049 | |
94144e68 | 2050 | return true; |
2051 | } | |
6688f8ec | 2052 | |
94144e68 | 2053 | case GIMPLE_CALL: |
2054 | { | |
2055 | tree lhs = gimple_call_lhs (stmt); | |
3064bb7b | 2056 | int flags = gimple_call_flags (stmt); |
15d138c9 | 2057 | tree val; |
94144e68 | 2058 | tree argt; |
2059 | bool changed = false; | |
2060 | unsigned i; | |
2061 | ||
2062 | /* If the call was folded into a constant make sure it goes | |
2063 | away even if we cannot propagate into all uses because of | |
2064 | type issues. */ | |
2065 | if (lhs | |
2066 | && TREE_CODE (lhs) == SSA_NAME | |
3064bb7b | 2067 | && (val = get_constant_value (lhs)) |
2068 | /* Don't optimize away calls that have side-effects. */ | |
2069 | && (flags & (ECF_CONST|ECF_PURE)) != 0 | |
2070 | && (flags & ECF_LOOPING_CONST_OR_PURE) == 0) | |
94144e68 | 2071 | { |
15d138c9 | 2072 | tree new_rhs = unshare_expr (val); |
338cce8f | 2073 | bool res; |
94144e68 | 2074 | if (!useless_type_conversion_p (TREE_TYPE (lhs), |
2075 | TREE_TYPE (new_rhs))) | |
2076 | new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs); | |
338cce8f | 2077 | res = update_call_from_tree (gsi, new_rhs); |
2078 | gcc_assert (res); | |
94144e68 | 2079 | return true; |
2080 | } | |
2081 | ||
fb049fba | 2082 | /* Internal calls provide no argument types, so the extra laxity |
2083 | for normal calls does not apply. */ | |
2084 | if (gimple_call_internal_p (stmt)) | |
2085 | return false; | |
2086 | ||
581bf1c2 | 2087 | /* The heuristic of fold_builtin_alloca_with_align differs before and |
2088 | after inlining, so we don't require the arg to be changed into a | |
2089 | constant for folding, but just to be constant. */ | |
2090 | if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN)) | |
9a65cc0a | 2091 | { |
581bf1c2 | 2092 | tree new_rhs = fold_builtin_alloca_with_align (stmt); |
6e93d308 | 2093 | if (new_rhs) |
2094 | { | |
2095 | bool res = update_call_from_tree (gsi, new_rhs); | |
582a80ed | 2096 | tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 0);
6e93d308 | 2097 | gcc_assert (res); |
582a80ed | 2098 | insert_clobbers_for_var (*gsi, var); |
6e93d308 | 2099 | return true; |
2100 | } | |
9a65cc0a | 2101 | } |
2102 | ||
94144e68 | 2103 | /* Propagate into the call arguments. Compared to replace_uses_in |
2104 | this can use the argument slot types for type verification | |
2105 | instead of the current argument type. We also can safely | |
2106 | drop qualifiers here as we are dealing with constants anyway. */ | |
2de00a2d | 2107 | argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt)); |
94144e68 | 2108 | for (i = 0; i < gimple_call_num_args (stmt) && argt; |
2109 | ++i, argt = TREE_CHAIN (argt)) | |
2110 | { | |
2111 | tree arg = gimple_call_arg (stmt, i); | |
2112 | if (TREE_CODE (arg) == SSA_NAME | |
15d138c9 | 2113 | && (val = get_constant_value (arg)) |
94144e68 | 2114 | && useless_type_conversion_p |
2115 | (TYPE_MAIN_VARIANT (TREE_VALUE (argt)), | |
15d138c9 | 2116 | TYPE_MAIN_VARIANT (TREE_TYPE (val)))) |
94144e68 | 2117 | { |
15d138c9 | 2118 | gimple_call_set_arg (stmt, i, unshare_expr (val)); |
94144e68 | 2119 | changed = true; |
2120 | } | |
2121 | } | |
e16f4c39 | 2122 | |
94144e68 | 2123 | return changed; |
2124 | } | |
6688f8ec | 2125 | |
6872bf3c | 2126 | case GIMPLE_ASSIGN: |
2127 | { | |
2128 | tree lhs = gimple_assign_lhs (stmt); | |
15d138c9 | 2129 | tree val; |
6872bf3c | 2130 | |
2131 | /* If we have a load that turned out to be constant replace it | |
2132 | as we cannot propagate into all uses in all cases. */ | |
2133 | if (gimple_assign_single_p (stmt) | |
2134 | && TREE_CODE (lhs) == SSA_NAME | |
15d138c9 | 2135 | && (val = get_constant_value (lhs))) |
6872bf3c | 2136 | { |
15d138c9 | 2137 | tree rhs = unshare_expr (val); |
6872bf3c | 2138 | if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs))) |
182cf5a9 | 2139 | rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs); |
6872bf3c | 2140 | gimple_assign_set_rhs_from_tree (gsi, rhs); |
2141 | return true; | |
2142 | } | |
2143 | ||
2144 | return false; | |
2145 | } | |
2146 | ||
94144e68 | 2147 | default: |
2148 | return false; | |
2149 | } | |
6688f8ec | 2150 | } |
2151 | ||
41511585 | 2152 | /* Visit the assignment statement STMT. Set the value of its LHS to the |
88dbf20f | 2153 | value computed by the RHS and store LHS in *OUTPUT_P. If STMT |
2154 | creates virtual definitions, set the value of each new name to that | |
75a70cf9 | 2155 | of the RHS (if we can derive a constant out of the RHS). |
2156 | Value-returning call statements also perform an assignment, and | |
2157 | are handled here. */ | |
4ee9c684 | 2158 | |
41511585 | 2159 | static enum ssa_prop_result |
75a70cf9 | 2160 | visit_assignment (gimple stmt, tree *output_p) |
4ee9c684 | 2161 | { |
88dbf20f | 2162 | prop_value_t val; |
88dbf20f | 2163 | enum ssa_prop_result retval; |
4ee9c684 | 2164 | |
75a70cf9 | 2165 | tree lhs = gimple_get_lhs (stmt); |
4ee9c684 | 2166 | |
75a70cf9 | 2167 | gcc_assert (gimple_code (stmt) != GIMPLE_CALL |
2168 | || gimple_call_lhs (stmt) != NULL_TREE); | |
2169 | ||
15d138c9 | 2170 | if (gimple_assign_single_p (stmt) |
2171 | && gimple_assign_rhs_code (stmt) == SSA_NAME) | |
2172 | /* For a simple copy operation, we copy the lattice values. */ | |
2173 | val = *get_value (gimple_assign_rhs1 (stmt)); | |
41511585 | 2174 | else |
75a70cf9 | 2175 | /* Evaluate the statement, which could be |
2176 | either a GIMPLE_ASSIGN or a GIMPLE_CALL. */ | |
04236c3a | 2177 | val = evaluate_stmt (stmt); |
4ee9c684 | 2178 | |
88dbf20f | 2179 | retval = SSA_PROP_NOT_INTERESTING; |
4ee9c684 | 2180 | |
41511585 | 2181 | /* Set the lattice value of the statement's output. */ |
88dbf20f | 2182 | if (TREE_CODE (lhs) == SSA_NAME) |
4ee9c684 | 2183 | { |
88dbf20f | 2184 | /* If STMT is an assignment to an SSA_NAME, we only have one |
2185 | value to set. */ | |
2186 | if (set_lattice_value (lhs, val)) | |
2187 | { | |
2188 | *output_p = lhs; | |
2189 | if (val.lattice_val == VARYING) | |
2190 | retval = SSA_PROP_VARYING; | |
2191 | else | |
2192 | retval = SSA_PROP_INTERESTING; | |
2193 | } | |
4ee9c684 | 2194 | } |
88dbf20f | 2195 | |
2196 | return retval; | |
4ee9c684 | 2197 | } |
2198 | ||
4ee9c684 | 2199 | |
41511585 | 2200 | /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING |
2201 | if it can determine which edge will be taken. Otherwise, return | |
2202 | SSA_PROP_VARYING. */ | |
2203 | ||
2204 | static enum ssa_prop_result | |
75a70cf9 | 2205 | visit_cond_stmt (gimple stmt, edge *taken_edge_p) |
4ee9c684 | 2206 | { |
88dbf20f | 2207 | prop_value_t val; |
41511585 | 2208 | basic_block block; |
2209 | ||
75a70cf9 | 2210 | block = gimple_bb (stmt); |
41511585 | 2211 | val = evaluate_stmt (stmt); |
b7e55469 | 2212 | if (val.lattice_val != CONSTANT |
cf8f0e63 | 2213 | || !val.mask.is_zero ()) |
b7e55469 | 2214 | return SSA_PROP_VARYING; |
41511585 | 2215 | |
2216 | /* Find which edge out of the conditional block will be taken and add it | |
2217 | to the worklist. If no single edge can be determined statically, | |
2218 | return SSA_PROP_VARYING to feed all the outgoing edges to the | |
2219 | propagation engine. */ | |
b7e55469 | 2220 | *taken_edge_p = find_taken_edge (block, val.value); |
41511585 | 2221 | if (*taken_edge_p) |
2222 | return SSA_PROP_INTERESTING; | |
2223 | else | |
2224 | return SSA_PROP_VARYING; | |
4ee9c684 | 2225 | } |
2226 | ||
4ee9c684 | 2227 | |
41511585 | 2228 | /* Evaluate statement STMT. If the statement produces an output value and |
2229 | its evaluation changes the lattice value of its output, return | |
2230 | SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the | |
2231 | output value. | |
48e1416a | 2232 | |
41511585 | 2233 | If STMT is a conditional branch and we can determine its truth |
2234 | value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying | |
2235 | value, return SSA_PROP_VARYING. */ | |
4ee9c684 | 2236 | |
41511585 | 2237 | static enum ssa_prop_result |
75a70cf9 | 2238 | ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p) |
41511585 | 2239 | { |
41511585 | 2240 | tree def; |
2241 | ssa_op_iter iter; | |
4ee9c684 | 2242 | |
41511585 | 2243 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4ee9c684 | 2244 | { |
88dbf20f | 2245 | fprintf (dump_file, "\nVisiting statement:\n"); |
75a70cf9 | 2246 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
4ee9c684 | 2247 | } |
4ee9c684 | 2248 | |
75a70cf9 | 2249 | switch (gimple_code (stmt)) |
4ee9c684 | 2250 | { |
75a70cf9 | 2251 | case GIMPLE_ASSIGN: |
2252 | /* If the statement is an assignment that produces a single | |
2253 | output value, evaluate its RHS to see if the lattice value of | |
2254 | its output has changed. */ | |
2255 | return visit_assignment (stmt, output_p); | |
2256 | ||
2257 | case GIMPLE_CALL: | |
2258 | /* A value-returning call also performs an assignment. */ | |
2259 | if (gimple_call_lhs (stmt) != NULL_TREE) | |
2260 | return visit_assignment (stmt, output_p); | |
2261 | break; | |
2262 | ||
2263 | case GIMPLE_COND: | |
2264 | case GIMPLE_SWITCH: | |
2265 | /* If STMT is a conditional branch, see if we can determine | |
2266 | which branch will be taken. */ | |
2267 | /* FIXME. It appears that we should be able to optimize | |
2268 | computed GOTOs here as well. */ | |
2269 | return visit_cond_stmt (stmt, taken_edge_p); | |
2270 | ||
2271 | default: | |
2272 | break; | |
4ee9c684 | 2273 | } |
4ee9c684 | 2274 | |
41511585 | 2275 | /* Any other kind of statement is not interesting for constant |
2276 | propagation and, therefore, not worth simulating. */ | |
41511585 | 2277 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2278 | fprintf (dump_file, "No interesting values produced. Marked VARYING.\n"); | |
4ee9c684 | 2279 | |
41511585 | 2280 | /* Definitions made by statements other than assignments to |
2281 | SSA_NAMEs represent unknown modifications to their outputs. | |
2282 | Mark them VARYING. */ | |
88dbf20f | 2283 | FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS) |
2284 | { | |
b7e55469 | 2285 | prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } }; |
88dbf20f | 2286 | set_lattice_value (def, v); |
2287 | } | |
4ee9c684 | 2288 | |
41511585 | 2289 | return SSA_PROP_VARYING; |
2290 | } | |
4ee9c684 | 2291 | |
4ee9c684 | 2292 | |
88dbf20f | 2293 | /* Main entry point for SSA Conditional Constant Propagation. */ |
41511585 | 2294 | |
33a34f1e | 2295 | static unsigned int |
61207d43 | 2296 | do_ssa_ccp (void) |
41511585 | 2297 | { |
582a80ed | 2298 | unsigned int todo = 0; |
2299 | calculate_dominance_info (CDI_DOMINATORS); | |
41511585 | 2300 | ccp_initialize (); |
2301 | ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node); | |
33a34f1e | 2302 | if (ccp_finalize ()) |
560965e9 | 2303 | todo = (TODO_cleanup_cfg | TODO_update_ssa); |
582a80ed | 2304 | free_dominance_info (CDI_DOMINATORS); |
2305 | return todo; | |
4ee9c684 | 2306 | } |
2307 | ||
5664499b | 2308 | |
cbe8bda8 | 2309 | namespace { |
2310 | ||
2311 | const pass_data pass_data_ccp = | |
41511585 | 2312 | { |
cbe8bda8 | 2313 | GIMPLE_PASS, /* type */ |
2314 | "ccp", /* name */ | |
2315 | OPTGROUP_NONE, /* optinfo_flags */ | |
cbe8bda8 | 2316 | true, /* has_execute */ |
2317 | TV_TREE_CCP, /* tv_id */ | |
2318 | ( PROP_cfg | PROP_ssa ), /* properties_required */ | |
2319 | 0, /* properties_provided */ | |
2320 | 0, /* properties_destroyed */ | |
2321 | 0, /* todo_flags_start */ | |
2322 | ( TODO_verify_ssa | TODO_update_address_taken | |
2323 | | TODO_verify_stmts ), /* todo_flags_finish */ | |
41511585 | 2324 | }; |
4ee9c684 | 2325 | |
cbe8bda8 | 2326 | class pass_ccp : public gimple_opt_pass |
2327 | { | |
2328 | public: | |
9af5ce0c | 2329 | pass_ccp (gcc::context *ctxt) |
2330 | : gimple_opt_pass (pass_data_ccp, ctxt) | |
cbe8bda8 | 2331 | {} |
2332 | ||
2333 | /* opt_pass methods: */ | |
ae84f584 | 2334 | opt_pass * clone () { return new pass_ccp (m_ctxt); } |
31315c24 | 2335 | virtual bool gate (function *) { return flag_tree_ccp != 0; } |
65b0537f | 2336 | virtual unsigned int execute (function *) { return do_ssa_ccp (); } |
cbe8bda8 | 2337 | |
2338 | }; // class pass_ccp | |
2339 | ||
2340 | } // anon namespace | |
2341 | ||
2342 | gimple_opt_pass * | |
2343 | make_pass_ccp (gcc::context *ctxt) | |
2344 | { | |
2345 | return new pass_ccp (ctxt); | |
2346 | } | |
2347 | ||
4ee9c684 | 2348 | |
75a70cf9 | 2349 | |
bdd0e199 | 2350 | /* Try to optimize out __builtin_stack_restore. Optimize it out |
2351 | if there is another __builtin_stack_restore in the same basic | |
2352 | block and no calls or ASM_EXPRs are in between, or if this block's | |
2353 | only outgoing edge is to EXIT_BLOCK and there are no calls or | |
2354 | ASM_EXPRs after this __builtin_stack_restore. */ | |
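/* Illustrative shape of the first case (made-up SSA names):

     saved_1 = __builtin_stack_save ();
     ...                                   <-- no calls or ASM_EXPRs
     __builtin_stack_restore (saved_1);    <-- removable
     ...                                   <-- no calls or ASM_EXPRs
     __builtin_stack_restore (saved_2);

   Nothing between the two restores can allocate stack or observe the
   stack pointer, so the first restore has no visible effect.  */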
2355 | ||
2356 | static tree | |
75a70cf9 | 2357 | optimize_stack_restore (gimple_stmt_iterator i) |
bdd0e199 | 2358 | { |
6ea999da | 2359 | tree callee; |
2360 | gimple stmt; | |
75a70cf9 | 2361 | |
2362 | basic_block bb = gsi_bb (i); | |
2363 | gimple call = gsi_stmt (i); | |
bdd0e199 | 2364 | |
75a70cf9 | 2365 | if (gimple_code (call) != GIMPLE_CALL |
2366 | || gimple_call_num_args (call) != 1 | |
2367 | || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME | |
2368 | || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0)))) | |
bdd0e199 | 2369 | return NULL_TREE; |
2370 | ||
75a70cf9 | 2371 | for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i)) |
bdd0e199 | 2372 | { |
75a70cf9 | 2373 | stmt = gsi_stmt (i); |
2374 | if (gimple_code (stmt) == GIMPLE_ASM) | |
bdd0e199 | 2375 | return NULL_TREE; |
75a70cf9 | 2376 | if (gimple_code (stmt) != GIMPLE_CALL) |
bdd0e199 | 2377 | continue; |
2378 | ||
75a70cf9 | 2379 | callee = gimple_call_fndecl (stmt); |
c40a6f90 | 2380 | if (!callee |
2381 | || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL | |
2382 | /* All regular builtins are ok, just obviously not alloca. */ | |
581bf1c2 | 2383 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA |
2384 | || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN) | |
bdd0e199 | 2385 | return NULL_TREE; |
2386 | ||
2387 | if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE) | |
6ea999da | 2388 | goto second_stack_restore; |
bdd0e199 | 2389 | } |
2390 | ||
6ea999da | 2391 | if (!gsi_end_p (i)) |
bdd0e199 | 2392 | return NULL_TREE; |
2393 | ||
6ea999da | 2394 | /* Allow zero successors, or a single successor that is the exit block. */
2395 | switch (EDGE_COUNT (bb->succs)) | |
2396 | { | |
2397 | case 0: | |
2398 | break; | |
2399 | case 1: | |
34154e27 | 2400 | if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)) |
6ea999da | 2401 | return NULL_TREE; |
2402 | break; | |
2403 | default: | |
2404 | return NULL_TREE; | |
2405 | } | |
2406 | second_stack_restore: | |
bdd0e199 | 2407 | |
6ea999da | 2408 | /* If there's exactly one use, then zap the call to __builtin_stack_save. |
2409 | If there are multiple uses, then the last one should remove the call. | |
2410 | In any case, whether the call to __builtin_stack_save can be removed | |
2411 | or not is irrelevant to removing the call to __builtin_stack_restore. */ | |
2412 | if (has_single_use (gimple_call_arg (call, 0))) | |
2413 | { | |
2414 | gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0)); | |
2415 | if (is_gimple_call (stack_save)) | |
2416 | { | |
2417 | callee = gimple_call_fndecl (stack_save); | |
2418 | if (callee | |
2419 | && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL | |
2420 | && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE) | |
2421 | { | |
2422 | gimple_stmt_iterator stack_save_gsi; | |
2423 | tree rhs; | |
bdd0e199 | 2424 | |
6ea999da | 2425 | stack_save_gsi = gsi_for_stmt (stack_save); |
2426 | rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0); | |
2427 | update_call_from_tree (&stack_save_gsi, rhs); | |
2428 | } | |
2429 | } | |
2430 | } | |
bdd0e199 | 2431 | |
75a70cf9 | 2432 | /* No effect, so the statement will be deleted. */ |
bdd0e199 | 2433 | return integer_zero_node; |
2434 | } | |
75a70cf9 | 2435 | |
8a58ed0a | 2436 | /* If va_list type is a simple pointer and nothing special is needed, |
2437 | optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
2438 | optimize __builtin_va_end (&ap) away as a no-op, and turn __builtin_va_copy
2439 | into a simple pointer assignment. */
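/* Sketch of the va_start rewrite on such a target:

     __builtin_va_start (&ap, 0);

   becomes the plain pointer assignment

     ap = __builtin_next_arg (0);

   while __builtin_va_end (&ap) is simply deleted.  */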
2440 | ||
2441 | static tree | |
75a70cf9 | 2442 | optimize_stdarg_builtin (gimple call) |
8a58ed0a | 2443 | { |
5f57a8b1 | 2444 | tree callee, lhs, rhs, cfun_va_list; |
8a58ed0a | 2445 | bool va_list_simple_ptr; |
389dd41b | 2446 | location_t loc = gimple_location (call); |
8a58ed0a | 2447 | |
75a70cf9 | 2448 | if (gimple_code (call) != GIMPLE_CALL) |
8a58ed0a | 2449 | return NULL_TREE; |
2450 | ||
75a70cf9 | 2451 | callee = gimple_call_fndecl (call); |
5f57a8b1 | 2452 | |
2453 | cfun_va_list = targetm.fn_abi_va_list (callee); | |
2454 | va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list) | |
2455 | && (TREE_TYPE (cfun_va_list) == void_type_node | |
2456 | || TREE_TYPE (cfun_va_list) == char_type_node); | |
2457 | ||
8a58ed0a | 2458 | switch (DECL_FUNCTION_CODE (callee)) |
2459 | { | |
2460 | case BUILT_IN_VA_START: | |
2461 | if (!va_list_simple_ptr | |
2462 | || targetm.expand_builtin_va_start != NULL | |
e7ed5dd7 | 2463 | || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG)) |
8a58ed0a | 2464 | return NULL_TREE; |
2465 | ||
75a70cf9 | 2466 | if (gimple_call_num_args (call) != 2) |
8a58ed0a | 2467 | return NULL_TREE; |
2468 | ||
75a70cf9 | 2469 | lhs = gimple_call_arg (call, 0); |
8a58ed0a | 2470 | if (!POINTER_TYPE_P (TREE_TYPE (lhs)) |
2471 | || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs))) | |
5f57a8b1 | 2472 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
8a58ed0a | 2473 | return NULL_TREE; |
48e1416a | 2474 | |
389dd41b | 2475 | lhs = build_fold_indirect_ref_loc (loc, lhs); |
b9a16870 | 2476 | rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG), |
75a70cf9 | 2477 | 1, integer_zero_node); |
389dd41b | 2478 | rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs); |
8a58ed0a | 2479 | return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs); |
2480 | ||
2481 | case BUILT_IN_VA_COPY: | |
2482 | if (!va_list_simple_ptr) | |
2483 | return NULL_TREE; | |
2484 | ||
75a70cf9 | 2485 | if (gimple_call_num_args (call) != 2) |
8a58ed0a | 2486 | return NULL_TREE; |
2487 | ||
75a70cf9 | 2488 | lhs = gimple_call_arg (call, 0); |
8a58ed0a | 2489 | if (!POINTER_TYPE_P (TREE_TYPE (lhs)) |
2490 | || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs))) | |
5f57a8b1 | 2491 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
8a58ed0a | 2492 | return NULL_TREE; |
2493 | ||
389dd41b | 2494 | lhs = build_fold_indirect_ref_loc (loc, lhs); |
75a70cf9 | 2495 | rhs = gimple_call_arg (call, 1); |
8a58ed0a | 2496 | if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs)) |
5f57a8b1 | 2497 | != TYPE_MAIN_VARIANT (cfun_va_list)) |
8a58ed0a | 2498 | return NULL_TREE; |
2499 | ||
389dd41b | 2500 | rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs); |
8a58ed0a | 2501 | return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs); |
2502 | ||
2503 | case BUILT_IN_VA_END: | |
75a70cf9 | 2504 | /* No effect, so the statement will be deleted. */ |
8a58ed0a | 2505 | return integer_zero_node; |
2506 | ||
2507 | default: | |
2508 | gcc_unreachable (); | |
2509 | } | |
2510 | } | |
75a70cf9 | 2511 | |
f87df69a | 2512 | /* Attempt to make the block of the __builtin_unreachable at I unreachable
2513 | by changing the incoming jumps. Return true if at least one jump was changed. */
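/* Illustrative CFG fragment (block numbers are made up): given

     if (cond_1) goto <bb 3>; else goto <bb 4>;
     ...
     <bb 3>:
     __builtin_unreachable ();

   the GIMPLE_COND feeding bb 3 is rewritten to the constant false
   condition, so bb 3 loses its incoming edge and a later CFG cleanup
   removes it.  */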
2514 | ||
2515 | static bool | |
2516 | optimize_unreachable (gimple_stmt_iterator i) | |
2517 | { | |
2518 | basic_block bb = gsi_bb (i); | |
2519 | gimple_stmt_iterator gsi; | |
2520 | gimple stmt; | |
2521 | edge_iterator ei; | |
2522 | edge e; | |
2523 | bool ret; | |
2524 | ||
2525 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
2526 | { | |
2527 | stmt = gsi_stmt (gsi); | |
2528 | ||
2529 | if (is_gimple_debug (stmt)) | |
2530 | continue; | |
2531 | ||
2532 | if (gimple_code (stmt) == GIMPLE_LABEL) | |
2533 | { | |
2534 | /* Verify we do not need to preserve the label. */ | |
2535 | if (FORCED_LABEL (gimple_label_label (stmt))) | |
2536 | return false; | |
2537 | ||
2538 | continue; | |
2539 | } | |
2540 | ||
2541 | /* Only handle the case that __builtin_unreachable is the first statement | |
2542 | in the block. We rely on DCE to remove stmts without side-effects | |
2543 | before __builtin_unreachable. */ | |
2544 | if (gsi_stmt (gsi) != gsi_stmt (i)) | |
2545 | return false; | |
2546 | } | |
2547 | ||
2548 | ret = false; | |
2549 | FOR_EACH_EDGE (e, ei, bb->preds) | |
2550 | { | |
2551 | gsi = gsi_last_bb (e->src); | |
522f73a1 | 2552 | if (gsi_end_p (gsi)) |
2553 | continue; | |
f87df69a | 2554 | |
522f73a1 | 2555 | stmt = gsi_stmt (gsi); |
2556 | if (gimple_code (stmt) == GIMPLE_COND) | |
f87df69a | 2557 | { |
2558 | if (e->flags & EDGE_TRUE_VALUE) | |
2559 | gimple_cond_make_false (stmt); | |
2560 | else if (e->flags & EDGE_FALSE_VALUE) | |
2561 | gimple_cond_make_true (stmt); | |
2562 | else | |
2563 | gcc_unreachable (); | |
a03a52b4 | 2564 | update_stmt (stmt); |
f87df69a | 2565 | } |
2566 | else | |
2567 | { | |
2568 | /* TODO: handle other cases, e.g. a switch statement. */
2569 | continue; | |
2570 | } | |
2571 | ||
2572 | ret = true; | |
2573 | } | |
2574 | ||
2575 | return ret; | |
2576 | } | |
2577 | ||
4ee9c684 | 2578 | /* A simple pass that attempts to fold all builtin functions. This pass |
2579 | is run after we've propagated as many constants as we can. */ | |
2580 | ||
65b0537f | 2581 | namespace { |
2582 | ||
2583 | const pass_data pass_data_fold_builtins = | |
2584 | { | |
2585 | GIMPLE_PASS, /* type */ | |
2586 | "fab", /* name */ | |
2587 | OPTGROUP_NONE, /* optinfo_flags */ | |
2588 | true, /* has_execute */ | |
2589 | TV_NONE, /* tv_id */ | |
2590 | ( PROP_cfg | PROP_ssa ), /* properties_required */ | |
2591 | 0, /* properties_provided */ | |
2592 | 0, /* properties_destroyed */ | |
2593 | 0, /* todo_flags_start */ | |
2594 | ( TODO_verify_ssa | TODO_update_ssa ), /* todo_flags_finish */ | |
2595 | }; | |
2596 | ||
2597 | class pass_fold_builtins : public gimple_opt_pass | |
2598 | { | |
2599 | public: | |
2600 | pass_fold_builtins (gcc::context *ctxt) | |
2601 | : gimple_opt_pass (pass_data_fold_builtins, ctxt) | |
2602 | {} | |
2603 | ||
2604 | /* opt_pass methods: */ | |
2605 | opt_pass * clone () { return new pass_fold_builtins (m_ctxt); } | |
2606 | virtual unsigned int execute (function *); | |
2607 | ||
2608 | }; // class pass_fold_builtins | |
2609 | ||
2610 | unsigned int | |
2611 | pass_fold_builtins::execute (function *fun) | |
4ee9c684 | 2612 | { |
b36237eb | 2613 | bool cfg_changed = false; |
4ee9c684 | 2614 | basic_block bb; |
b1b7c0c4 | 2615 | unsigned int todoflags = 0; |
48e1416a | 2616 | |
65b0537f | 2617 | FOR_EACH_BB_FN (bb, fun) |
4ee9c684 | 2618 | { |
75a70cf9 | 2619 | gimple_stmt_iterator i; |
2620 | for (i = gsi_start_bb (bb); !gsi_end_p (i); ) | |
4ee9c684 | 2621 | { |
75a70cf9 | 2622 | gimple stmt, old_stmt; |
4ee9c684 | 2623 | tree callee, result; |
0a39fd54 | 2624 | enum built_in_function fcode; |
4ee9c684 | 2625 | |
75a70cf9 | 2626 | stmt = gsi_stmt (i); |
2627 | ||
2628 | if (gimple_code (stmt) != GIMPLE_CALL) | |
0a39fd54 | 2629 | { |
896a0c42 | 2630 | /* Remove all *ssaname_N ={v} {CLOBBER}; stmts;
2631 | after the last GIMPLE DSE they aren't needed and might | |
2632 | unnecessarily keep the SSA_NAMEs live. */ | |
2633 | if (gimple_clobber_p (stmt)) | |
2634 | { | |
2635 | tree lhs = gimple_assign_lhs (stmt); | |
2636 | if (TREE_CODE (lhs) == MEM_REF | |
2637 | && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME) | |
2638 | { | |
2639 | unlink_stmt_vdef (stmt); | |
2640 | gsi_remove (&i, true); | |
2641 | release_defs (stmt); | |
2642 | continue; | |
2643 | } | |
2644 | } | |
75a70cf9 | 2645 | gsi_next (&i); |
0a39fd54 | 2646 | continue; |
2647 | } | |
75a70cf9 | 2648 | callee = gimple_call_fndecl (stmt); |
4ee9c684 | 2649 | if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL) |
0a39fd54 | 2650 | { |
75a70cf9 | 2651 | gsi_next (&i); |
0a39fd54 | 2652 | continue; |
2653 | } | |
2654 | fcode = DECL_FUNCTION_CODE (callee); | |
4ee9c684 | 2655 | |
2d18b16d | 2656 | result = gimple_fold_builtin (stmt); |
5a4b7e1e | 2657 | |
2658 | if (result) | |
65b0537f | 2659 | gimple_remove_stmt_histograms (fun, stmt); |
5a4b7e1e | 2660 | |
4ee9c684 | 2661 | if (!result) |
2662 | switch (DECL_FUNCTION_CODE (callee)) | |
2663 | { | |
2664 | case BUILT_IN_CONSTANT_P: | |
2665 | /* Resolve __builtin_constant_p. If it hasn't been | |
2666 | folded to integer_one_node by now, it's fairly | |
2667 | certain that the value simply isn't constant. */ | |
75a70cf9 | 2668 | result = integer_zero_node; |
4ee9c684 | 2669 | break; |
2670 | ||
fca0886c | 2671 | case BUILT_IN_ASSUME_ALIGNED: |
2672 | /* Remove __builtin_assume_aligned. */ | |
2673 | result = gimple_call_arg (stmt, 0); | |
2674 | break; | |
2675 | ||
bdd0e199 | 2676 | case BUILT_IN_STACK_RESTORE: |
75a70cf9 | 2677 | result = optimize_stack_restore (i); |
8a58ed0a | 2678 | if (result) |
2679 | break; | |
75a70cf9 | 2680 | gsi_next (&i); |
8a58ed0a | 2681 | continue; |
2682 | ||
f87df69a | 2683 | case BUILT_IN_UNREACHABLE: |
2684 | if (optimize_unreachable (i)) | |
2685 | cfg_changed = true; | |
2686 | break; | |
2687 | ||
8a58ed0a | 2688 | case BUILT_IN_VA_START: |
2689 | case BUILT_IN_VA_END: | |
2690 | case BUILT_IN_VA_COPY: | |
2691 | /* These shouldn't be folded before pass_stdarg. */ | |
75a70cf9 | 2692 | result = optimize_stdarg_builtin (stmt); |
bdd0e199 | 2693 | if (result) |
2694 | break; | |
2695 | /* FALLTHRU */ | |
2696 | ||
4ee9c684 | 2697 | default: |
75a70cf9 | 2698 | gsi_next (&i); |
4ee9c684 | 2699 | continue; |
2700 | } | |
2701 | ||
f87df69a | 2702 | if (result == NULL_TREE) |
2703 | break; | |
2704 | ||
4ee9c684 | 2705 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2706 | { | |
2707 | fprintf (dump_file, "Simplified\n "); | |
75a70cf9 | 2708 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
4ee9c684 | 2709 | } |
2710 | ||
75a70cf9 | 2711 | old_stmt = stmt; |
75a70cf9 | 2712 | if (!update_call_from_tree (&i, result)) |
0fefde02 | 2713 | { |
2714 | gimplify_and_update_call_from_tree (&i, result); | |
2715 | todoflags |= TODO_update_address_taken; | |
2716 | } | |
de6ed584 | 2717 | |
75a70cf9 | 2718 | stmt = gsi_stmt (i); |
4c5fd53c | 2719 | update_stmt (stmt); |
de6ed584 | 2720 | |
75a70cf9 | 2721 | if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt) |
2722 | && gimple_purge_dead_eh_edges (bb)) | |
b36237eb | 2723 | cfg_changed = true; |
4ee9c684 | 2724 | |
2725 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2726 | { | |
2727 | fprintf (dump_file, "to\n "); | |
75a70cf9 | 2728 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
4ee9c684 | 2729 | fprintf (dump_file, "\n"); |
2730 | } | |
0a39fd54 | 2731 | |
2732 | /* Retry the same statement if it changed into another | |
2733 | builtin, there might be new opportunities now. */ | |
75a70cf9 | 2734 | if (gimple_code (stmt) != GIMPLE_CALL) |
0a39fd54 | 2735 | { |
75a70cf9 | 2736 | gsi_next (&i); |
0a39fd54 | 2737 | continue; |
2738 | } | |
75a70cf9 | 2739 | callee = gimple_call_fndecl (stmt); |
0a39fd54 | 2740 | if (!callee |
75a70cf9 | 2741 | || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL |
0a39fd54 | 2742 | || DECL_FUNCTION_CODE (callee) == fcode) |
75a70cf9 | 2743 | gsi_next (&i); |
4ee9c684 | 2744 | } |
2745 | } | |
48e1416a | 2746 | |
b36237eb | 2747 | /* Delete unreachable blocks. */ |
b1b7c0c4 | 2748 | if (cfg_changed) |
2749 | todoflags |= TODO_cleanup_cfg; | |
48e1416a | 2750 | |
b1b7c0c4 | 2751 | return todoflags; |
4ee9c684 | 2752 | } |
2753 | ||
cbe8bda8 | 2754 | } // anon namespace |
2755 | ||
2756 | gimple_opt_pass * | |
2757 | make_pass_fold_builtins (gcc::context *ctxt) | |
2758 | { | |
2759 | return new pass_fold_builtins (ctxt); | |
2760 | } |